Source code for airflow.providers.google.cloud.operators.datafusion
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google DataFusion operators."""
from __future__ import annotations

import time
from typing import TYPE_CHECKING, Any, Sequence

from deprecated import deprecated
from google.api_core.retry import exponential_sleep_generator
from googleapiclient.errors import HttpError

from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.google.cloud.hooks.datafusion import SUCCESS_STATES, DataFusionHook, PipelineStates
from airflow.providers.google.cloud.links.datafusion import (
    DataFusionInstanceLink,
    DataFusionPipelineLink,
    DataFusionPipelinesLink,
)
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
from airflow.providers.google.cloud.triggers.datafusion import DataFusionStartPipelineTrigger
from airflow.providers.google.cloud.utils.datafusion import DataFusionPipelineType
from airflow.providers.google.cloud.utils.helpers import resource_path_to_dict
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID

if TYPE_CHECKING:
    from airflow.utils.context import Context
class DataFusionPipelineLinkHelper:
    """
    Helper class for Pipeline links.

    .. warning::
        This class is deprecated. Consider using ``resource_path_to_dict()`` instead.
    """

    @staticmethod
    @deprecated(
        reason="Please use `airflow.providers.google.cloud.utils.helpers.resource_path_to_dict` instead.",
        category=AirflowProviderDeprecationWarning,
    )
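# Editor's note: the sketch below is an illustrative addition, not part of the upstream
# module. It shows the recommended replacement for the deprecated helper above; the
# resource name is a placeholder, and the helper is assumed to split a full resource
# path into a {collection: id} mapping, which matches how the operators below call it
# (``resource_path_to_dict(resource_name=instance["name"])["projects"]``):
#
#     parts = resource_path_to_dict(
#         resource_name="projects/example-project/locations/europe-west1/instances/example-instance"
#     )
#     project_id = parts["projects"]  # -> "example-project"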
class CloudDataFusionRestartInstanceOperator(GoogleCloudBaseOperator):
    """
    Restart a single Data Fusion instance.

    At the end of the operation the instance is fully restarted.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionRestartInstanceOperator`

    :param instance_name: The name of the instance to restart.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param project_id: The ID of the Google Cloud project that the instance belongs to.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Restarting Data Fusion instance: %s", self.instance_name)
        operation = hook.restart_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        instance = hook.wait_for_operation(operation)
        self.log.info("Instance %s restarted successfully", self.instance_name)

        project_id = resource_path_to_dict(resource_name=instance["name"])["projects"]
        DataFusionInstanceLink.persist(
            context=context,
            task_instance=self,
            project_id=project_id,
            instance_name=self.instance_name,
            location=self.location,
        )
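# Editor's note: an illustrative usage sketch, not part of the upstream module. The task
# would normally live inside a DAG definition, and all resource names are placeholders.
restart_instance = CloudDataFusionRestartInstanceOperator(
    task_id="restart_datafusion_instance",
    instance_name="example-instance",
    location="europe-west1",
    project_id="example-project",
)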
class CloudDataFusionDeleteInstanceOperator(GoogleCloudBaseOperator):
    """
    Deletes a single Data Fusion instance.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionDeleteInstanceOperator`

    :param instance_name: The name of the instance to delete.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param project_id: The ID of the Google Cloud project that the instance belongs to.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Data Fusion instance: %s", self.instance_name)
        operation = hook.delete_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        hook.wait_for_operation(operation)
        self.log.info("Instance %s deleted successfully", self.instance_name)
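# Editor's note: illustrative usage sketch only (placeholder names), not part of the upstream module.
delete_instance = CloudDataFusionDeleteInstanceOperator(
    task_id="delete_datafusion_instance",
    instance_name="example-instance",
    location="europe-west1",
    project_id="example-project",
)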
class CloudDataFusionCreateInstanceOperator(GoogleCloudBaseOperator):
    """
    Creates a new Data Fusion instance in the specified project and location.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionCreateInstanceOperator`

    :param instance_name: The name of the instance to create.
    :param instance: An Instance resource body. See:
        https://cloud.google.com/data-fusion/docs/reference/rest/v1beta1/projects.locations.instances#Instance
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param project_id: The ID of the Google Cloud project that the instance belongs to.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> dict:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating Data Fusion instance: %s", self.instance_name)
        try:
            operation = hook.create_instance(
                instance_name=self.instance_name,
                instance=self.instance,
                location=self.location,
                project_id=self.project_id,
            )
            instance = hook.wait_for_operation(operation)
            self.log.info("Instance %s created successfully", self.instance_name)
        except HttpError as err:
            if err.resp.status not in (409, "409"):
                raise
            self.log.info("Instance %s already exists", self.instance_name)
            instance = hook.get_instance(
                instance_name=self.instance_name, location=self.location, project_id=self.project_id
            )
            # Wait for instance to be ready
            for time_to_wait in exponential_sleep_generator(initial=10, maximum=120):
                if instance["state"] != "CREATING":
                    break
                time.sleep(time_to_wait)
                instance = hook.get_instance(
                    instance_name=self.instance_name, location=self.location, project_id=self.project_id
                )

        project_id = resource_path_to_dict(resource_name=instance["name"])["projects"]
        DataFusionInstanceLink.persist(
            context=context,
            task_instance=self,
            project_id=project_id,
            instance_name=self.instance_name,
            location=self.location,
        )
        return instance
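# Editor's note: illustrative usage sketch, not part of the upstream module. The minimal
# Instance body (only the "type" field) is an assumption based on the Instance resource
# linked in the docstring; all names are placeholders.
create_instance = CloudDataFusionCreateInstanceOperator(
    task_id="create_datafusion_instance",
    instance_name="example-instance",
    instance={"type": "BASIC"},
    location="europe-west1",
    project_id="example-project",
)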
class CloudDataFusionUpdateInstanceOperator(GoogleCloudBaseOperator):
    """
    Updates a single Data Fusion instance.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionUpdateInstanceOperator`

    :param instance_name: The name of the instance to update.
    :param instance: An Instance resource body. See:
        https://cloud.google.com/data-fusion/docs/reference/rest/v1beta1/projects.locations.instances#Instance
    :param update_mask: Field mask used to specify the fields that the update will overwrite
        in an instance resource. The fields specified in the update mask are relative to the
        resource, not the full request. A field will be overwritten if it is in the mask.
        If the user does not provide a mask, all the supported fields (currently labels and
        options) will be overwritten. A comma-separated list of fully qualified names of
        fields, for example: "user.displayName,photo".
        https://developers.google.com/protocol-buffers/docs/reference/google.protobuf?_ga=2.205612571.-968688242.1573564810#google.protobuf.FieldMask
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param project_id: The ID of the Google Cloud project that the instance belongs to.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Updating Data Fusion instance: %s", self.instance_name)
        operation = hook.patch_instance(
            instance_name=self.instance_name,
            instance=self.instance,
            update_mask=self.update_mask,
            location=self.location,
            project_id=self.project_id,
        )
        instance = hook.wait_for_operation(operation)
        self.log.info("Instance %s updated successfully", self.instance_name)

        project_id = resource_path_to_dict(resource_name=instance["name"])["projects"]
        DataFusionInstanceLink.persist(
            context=context,
            task_instance=self,
            project_id=project_id,
            instance_name=self.instance_name,
            location=self.location,
        )
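# Editor's note: illustrative usage sketch, not part of the upstream module. Per the
# docstring, only labels and options are currently supported by the update mask; all
# names are placeholders.
update_instance = CloudDataFusionUpdateInstanceOperator(
    task_id="update_datafusion_instance",
    instance_name="example-instance",
    instance={"labels": {"env": "dev"}},
    update_mask="labels",
    location="europe-west1",
    project_id="example-project",
)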
class CloudDataFusionGetInstanceOperator(GoogleCloudBaseOperator):
    """
    Gets details of a single Data Fusion instance.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionGetInstanceOperator`

    :param instance_name: The name of the instance.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param project_id: The ID of the Google Cloud project that the instance belongs to.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> dict:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Retrieving Data Fusion instance: %s", self.instance_name)
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )

        project_id = resource_path_to_dict(resource_name=instance["name"])["projects"]
        DataFusionInstanceLink.persist(
            context=context,
            task_instance=self,
            project_id=project_id,
            instance_name=self.instance_name,
            location=self.location,
        )
        return instance
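# Editor's note: illustrative usage sketch, not part of the upstream module; all names are
# placeholders. Because execute() returns the instance dict, downstream tasks can read it
# from XCom (for example, to pick up the instance's apiEndpoint).
get_instance = CloudDataFusionGetInstanceOperator(
    task_id="get_datafusion_instance",
    instance_name="example-instance",
    location="europe-west1",
    project_id="example-project",
)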
class CloudDataFusionCreatePipelineOperator(GoogleCloudBaseOperator):
    """
    Creates a Cloud Data Fusion pipeline.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionCreatePipelineOperator`

    :param pipeline_name: Your pipeline name.
    :param pipeline: The pipeline definition. For more information check:
        https://docs.cdap.io/cdap/current/en/developer-manual/pipelines/developing-pipelines.html#pipeline-configuration-file-format
    :param instance_name: The name of the instance.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param namespace: If your pipeline belongs to a Basic edition instance, the namespace ID
        is always default. If your pipeline belongs to an Enterprise edition instance, you
        can create a namespace.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating Data Fusion pipeline: %s", self.pipeline_name)
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        api_url = instance["apiEndpoint"]
        hook.create_pipeline(
            pipeline_name=self.pipeline_name,
            pipeline=self.pipeline,
            instance_url=api_url,
            namespace=self.namespace,
        )
        DataFusionPipelineLink.persist(
            context=context,
            task_instance=self,
            uri=instance["serviceEndpoint"],
            pipeline_name=self.pipeline_name,
            namespace=self.namespace,
        )
        self.log.info("Pipeline %s created", self.pipeline_name)
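# Editor's note: illustrative usage sketch, not part of the upstream module. The pipeline
# body is a CDAP pipeline configuration dict (for example, exported from the Pipeline
# Studio); the file path and all other names are placeholders.
import json
from pathlib import Path

create_pipeline = CloudDataFusionCreatePipelineOperator(
    task_id="create_datafusion_pipeline",
    pipeline_name="example-pipeline",
    pipeline=json.loads(Path("example_pipeline.json").read_text()),
    instance_name="example-instance",
    location="europe-west1",
)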
class CloudDataFusionDeletePipelineOperator(GoogleCloudBaseOperator):
    """
    Deletes a Cloud Data Fusion pipeline.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionDeletePipelineOperator`

    :param pipeline_name: Your pipeline name.
    :param version_id: The version of the pipeline to delete.
    :param instance_name: The name of the instance.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param namespace: If your pipeline belongs to a Basic edition instance, the namespace ID
        is always default. If your pipeline belongs to an Enterprise edition instance, you
        can create a namespace.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Deleting Data Fusion pipeline: %s", self.pipeline_name)
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        api_url = instance["apiEndpoint"]
        hook.delete_pipeline(
            pipeline_name=self.pipeline_name,
            version_id=self.version_id,
            instance_url=api_url,
            namespace=self.namespace,
        )
        self.log.info("Pipeline deleted")
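# Editor's note: illustrative usage sketch, not part of the upstream module; the pipeline
# name, version identifier, and other resource names are placeholders.
delete_pipeline = CloudDataFusionDeletePipelineOperator(
    task_id="delete_datafusion_pipeline",
    pipeline_name="example-pipeline",
    version_id="1.0.0",
    instance_name="example-instance",
    location="europe-west1",
)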
class CloudDataFusionListPipelinesOperator(GoogleCloudBaseOperator):
    """
    Lists Cloud Data Fusion pipelines.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionListPipelinesOperator`

    :param instance_name: The name of the instance.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param artifact_version: Artifact version used to filter pipelines.
    :param artifact_name: Artifact name used to filter pipelines.
    :param namespace: If your pipeline belongs to a Basic edition instance, the namespace ID
        is always default. If your pipeline belongs to an Enterprise edition instance, you
        can create a namespace.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> dict:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Listing Data Fusion pipelines")
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        api_url = instance["apiEndpoint"]
        service_endpoint = instance["serviceEndpoint"]
        pipelines = hook.list_pipelines(
            instance_url=api_url,
            namespace=self.namespace,
            artifact_version=self.artifact_version,
            artifact_name=self.artifact_name,
        )
        self.log.info("Pipelines: %s", pipelines)

        DataFusionPipelinesLink.persist(
            context=context,
            task_instance=self,
            uri=service_endpoint,
            namespace=self.namespace,
        )
        return pipelines
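# Editor's note: illustrative usage sketch, not part of the upstream module. The artifact
# filter values and all resource names below are placeholders.
list_pipelines = CloudDataFusionListPipelinesOperator(
    task_id="list_datafusion_pipelines",
    instance_name="example-instance",
    location="europe-west1",
    artifact_name="cdap-data-pipeline",
    artifact_version="6.8.3",
)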
class CloudDataFusionStartPipelineOperator(GoogleCloudBaseOperator):
    """
    Starts a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionStartPipelineOperator`

    :param pipeline_name: Your pipeline name.
    :param pipeline_type: Optional pipeline type (BATCH by default).
    :param instance_name: The name of the instance.
    :param success_states: If provided, the operator will wait for the pipeline to be in one
        of the provided states.
    :param pipeline_timeout: How long (in seconds) the operator should wait for the pipeline
        to be in one of ``success_states``. Works only if ``success_states`` are provided.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param runtime_args: Optional runtime args to be passed to the pipeline.
    :param namespace: If your pipeline belongs to a Basic edition instance, the namespace ID
        is always default. If your pipeline belongs to an Enterprise edition instance, you
        can create a namespace.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param asynchronous: Flag to return after submitting the pipeline ID to the Data Fusion
        API. This is useful for submitting long-running pipelines and waiting on them
        asynchronously using the CloudDataFusionPipelineStateSensor.
    :param deferrable: Run the operator in deferrable mode. This is not related to the
        ``asynchronous`` parameter: while ``asynchronous`` makes it possible to wait until the
        pipeline reaches a terminal state by sleeping between checks, deferrable mode checks
        the state using asynchronous calls. It is not possible to use both ``asynchronous``
        and ``deferrable`` at the same time.
    :param poll_interval: Polling period in seconds to check for the status. Used only in
        deferrable mode.
    """
    def execute(self, context: Context) -> str:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Starting Data Fusion pipeline: %s", self.pipeline_name)
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        api_url = instance["apiEndpoint"]
        pipeline_id = hook.start_pipeline(
            pipeline_name=self.pipeline_name,
            pipeline_type=self.pipeline_type,
            instance_url=api_url,
            namespace=self.namespace,
            runtime_args=self.runtime_args,
        )
        self.log.info("Pipeline %s submitted successfully.", pipeline_id)

        DataFusionPipelineLink.persist(
            context=context,
            task_instance=self,
            uri=instance["serviceEndpoint"],
            pipeline_name=self.pipeline_name,
            namespace=self.namespace,
        )

        if self.deferrable:
            if self.asynchronous:
                raise AirflowException(
                    "Both asynchronous and deferrable parameters were passed. Please, provide only one."
                )
            self.defer(
                trigger=DataFusionStartPipelineTrigger(
                    success_states=self.success_states,
                    instance_url=api_url,
                    namespace=self.namespace,
                    pipeline_name=self.pipeline_name,
                    pipeline_type=self.pipeline_type.value,
                    pipeline_id=pipeline_id,
                    poll_interval=self.poll_interval,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                ),
                method_name="execute_complete",
            )
        else:
            if not self.asynchronous:
                # when NOT using asynchronous mode it will just wait for pipeline to finish and print message
                self.log.info("Waiting when pipeline %s will be in one of the success states", pipeline_id)
                hook.wait_for_pipeline_state(
                    success_states=self.success_states,
                    pipeline_id=pipeline_id,
                    pipeline_name=self.pipeline_name,
                    pipeline_type=self.pipeline_type,
                    namespace=self.namespace,
                    instance_url=api_url,
                    timeout=self.pipeline_timeout,
                )
                self.log.info("Pipeline %s discovered success state.", pipeline_id)
            # otherwise, return pipeline_id so that sensor can use it later to check the pipeline state
            return pipeline_id
    def execute_complete(self, context: Context, event: dict[str, Any]):
        """
        Act as a callback for when the trigger fires - returns immediately.

        Relies on trigger to throw an exception, otherwise it assumes execution was successful.
        """
        if event["status"] == "error":
            raise AirflowException(event["message"])
        self.log.info(
            "%s completed with response %s ",
            self.task_id,
            event["message"],
        )
        return event["pipeline_id"]
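# Editor's note: illustrative usage sketch, not part of the upstream module; the runtime
# arguments, poll interval, and resource names are placeholders. With deferrable=True the
# task defers to the trigger above instead of blocking a worker while the pipeline runs.
start_pipeline = CloudDataFusionStartPipelineOperator(
    task_id="start_datafusion_pipeline",
    pipeline_name="example-pipeline",
    instance_name="example-instance",
    location="europe-west1",
    runtime_args={"input.path": "gs://example-bucket/input"},
    deferrable=True,
    poll_interval=30,
)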
class CloudDataFusionStopPipelineOperator(GoogleCloudBaseOperator):
    """
    Stops a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:CloudDataFusionStopPipelineOperator`

    :param pipeline_name: Your pipeline name.
    :param instance_name: The name of the instance.
    :param location: The Cloud Data Fusion location in which to handle the request.
    :param namespace: If your pipeline belongs to a Basic edition instance, the namespace ID
        is always default. If your pipeline belongs to an Enterprise edition instance, you
        can create a namespace.
    :param api_version: The version of the API that will be requested, for example 'v3'.
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """
    def execute(self, context: Context) -> None:
        hook = DataFusionHook(
            gcp_conn_id=self.gcp_conn_id,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Data Fusion pipeline: %s is going to be stopped", self.pipeline_name)
        instance = hook.get_instance(
            instance_name=self.instance_name,
            location=self.location,
            project_id=self.project_id,
        )
        api_url = instance["apiEndpoint"]

        DataFusionPipelineLink.persist(
            context=context,
            task_instance=self,
            uri=instance["serviceEndpoint"],
            pipeline_name=self.pipeline_name,
            namespace=self.namespace,
        )
        hook.stop_pipeline(
            pipeline_name=self.pipeline_name,
            instance_url=api_url,
            namespace=self.namespace,
        )
        self.log.info("Pipeline stopped")
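# Editor's note: illustrative usage sketch, not part of the upstream module; all names are placeholders.
stop_pipeline = CloudDataFusionStopPipelineOperator(
    task_id="stop_datafusion_pipeline",
    pipeline_name="example-pipeline",
    instance_name="example-instance",
    location="europe-west1",
)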