Source code for airflow.providers.google.cloud.hooks.dataproc
## Licensed to the Apache Software Foundation (ASF) under one# or more contributor license agreements. See the NOTICE file# distributed with this work for additional information# regarding copyright ownership. The ASF licenses this file# to you under the Apache License, Version 2.0 (the# "License"); you may not use this file except in compliance# with the License. You may obtain a copy of the License at## http://www.apache.org/licenses/LICENSE-2.0## Unless required by applicable law or agreed to in writing,# software distributed under the License is distributed on an# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY# KIND, either express or implied. See the License for the# specific language governing permissions and limitations# under the License.#"""This module contains a Google Cloud Dataproc hook."""importtimeimportuuidimportwarningsfromtypingimportAny,Dict,Iterable,List,Optional,Sequence,Tuple,Unionfromgoogle.api_core.exceptionsimportServerErrorfromgoogle.api_core.operationimportOperationfromgoogle.api_core.retryimportRetryfromgoogle.cloud.dataproc_v1import(Batch,BatchControllerClient,Cluster,ClusterControllerClient,Job,JobControllerClient,JobStatus,WorkflowTemplate,WorkflowTemplateServiceClient,)fromgoogle.protobuf.duration_pb2importDurationfromgoogle.protobuf.field_mask_pb2importFieldMaskfromairflow.exceptionsimportAirflowExceptionfromairflow.providers.google.common.constsimportCLIENT_INFOfromairflow.providers.google.common.hooks.base_googleimportGoogleBaseHookfromairflow.versionimportversionasairflow_version
class DataProcJobBuilder:
    """A helper class for building a Dataproc job definition dict incrementally."""

    def __init__(
        self,
        project_id: str,
        task_id: str,
        cluster_name: str,
        job_type: str,
        properties: Optional[Dict[str, str]] = None,
    ) -> None:
        """
        :param project_id: ID of the Google Cloud project the job belongs to.
        :param task_id: Airflow task id, used as the base of the generated job id.
        :param cluster_name: Name of the cluster the job is placed on.
        :param job_type: Dataproc job type key (e.g. ``pyspark_job``, ``hive_job``).
        :param properties: Optional job properties stored under the job type section.
        """
        # Random suffix keeps job ids unique across retries/reruns.
        name = task_id + "_" + str(uuid.uuid4())[:8]
        self.job_type = job_type
        # Dataproc labels must match [a-z]([-a-z0-9]*[a-z0-9])?, so '.' and '+'
        # in the airflow version string are replaced with '-'.
        self.job: Dict[str, Any] = {
            "job": {
                "reference": {"project_id": project_id, "job_id": name},
                "placement": {"cluster_name": cluster_name},
                "labels": {'airflow-version': 'v' + airflow_version.replace('.', '-').replace('+', '-')},
                job_type: {},
            }
        }
        if properties is not None:
            self.job["job"][job_type]["properties"] = properties

    def add_labels(self, labels: Optional[dict] = None) -> None:
        """
        Set labels for Dataproc job.

        :param labels: Labels for the job query.
        """
        if labels:
            self.job["job"]["labels"].update(labels)

    def add_variables(self, variables: Optional[Dict] = None) -> None:
        """
        Set variables for Dataproc job.

        :param variables: Variables for the job query.
        """
        if variables is not None:
            self.job["job"][self.job_type]["script_variables"] = variables

    def add_args(self, args: Optional[List[str]] = None) -> None:
        """
        Set args for Dataproc job.

        :param args: Args for the job query.
        """
        if args is not None:
            self.job["job"][self.job_type]["args"] = args

    def add_query(self, query: str) -> None:
        """
        Set query for Dataproc job.

        :param query: query for the job.
        """
        self.job["job"][self.job_type]["query_list"] = {'queries': [query]}

    def add_query_uri(self, query_uri: str) -> None:
        """
        Set query uri for Dataproc job.

        :param query_uri: URI for the job query.
        """
        self.job["job"][self.job_type]["query_file_uri"] = query_uri

    def add_jar_file_uris(self, jars: Optional[List[str]] = None) -> None:
        """
        Set jars uris for Dataproc job.

        :param jars: List of jars URIs
        """
        if jars is not None:
            self.job["job"][self.job_type]["jar_file_uris"] = jars

    def add_archive_uris(self, archives: Optional[List[str]] = None) -> None:
        """
        Set archives uris for Dataproc job.

        :param archives: List of archives URIs
        """
        if archives is not None:
            self.job["job"][self.job_type]["archive_uris"] = archives

    def add_file_uris(self, files: Optional[List[str]] = None) -> None:
        """
        Set file uris for Dataproc job.

        :param files: List of files URIs
        """
        if files is not None:
            self.job["job"][self.job_type]["file_uris"] = files

    def add_python_file_uris(self, pyfiles: Optional[List[str]] = None) -> None:
        """
        Set python file uris for Dataproc job.

        :param pyfiles: List of python files URIs
        """
        if pyfiles is not None:
            self.job["job"][self.job_type]["python_file_uris"] = pyfiles

    def set_main(self, main_jar: Optional[str] = None, main_class: Optional[str] = None) -> None:
        """
        Set Dataproc main class.

        :param main_jar: URI for the main file.
        :param main_class: Name of the main class.
        :raises ValueError: if both ``main_jar`` and ``main_class`` are given —
            the Dataproc API treats them as mutually exclusive.
        """
        # ValueError (not a bare Exception) so callers can handle it precisely;
        # still caught by any pre-existing `except Exception` handlers.
        if main_class is not None and main_jar is not None:
            raise ValueError("Set either main_jar or main_class")
        if main_jar:
            self.job["job"][self.job_type]["main_jar_file_uri"] = main_jar
        else:
            self.job["job"][self.job_type]["main_class"] = main_class

    def set_python_main(self, main: str) -> None:
        """
        Set Dataproc main python file uri.

        :param main: URI for the python main file.
        """
        self.job["job"][self.job_type]["main_python_file_uri"] = main

    def set_job_name(self, name: str) -> None:
        """
        Set Dataproc job name (a random suffix is appended for uniqueness).

        :param name: Job name.
        """
        self.job["job"]["reference"]["job_id"] = name + "_" + str(uuid.uuid4())[:8]
[docs]classDataprocHook(GoogleBaseHook):""" Hook for Google Cloud Dataproc APIs. All the methods in the hook where project_id is used must be called with keyword arguments rather than positional. """
[docs]defget_cluster_client(self,region:Optional[str]=None,location:Optional[str]=None)->ClusterControllerClient:"""Returns ClusterControllerClient."""iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationclient_options=Noneifregionandregion!='global':client_options={'api_endpoint':f'{region}-dataproc.googleapis.com:443'}returnClusterControllerClient(credentials=self._get_credentials(),client_info=CLIENT_INFO,client_options=client_options
)
[docs]defget_template_client(self,region:Optional[str]=None,location:Optional[str]=None)->WorkflowTemplateServiceClient:"""Returns WorkflowTemplateServiceClient."""iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationclient_options=Noneifregionandregion!='global':client_options={'api_endpoint':f'{region}-dataproc.googleapis.com:443'}returnWorkflowTemplateServiceClient(credentials=self._get_credentials(),client_info=CLIENT_INFO,client_options=client_options
)
[docs]defget_job_client(self,region:Optional[str]=None,location:Optional[str]=None)->JobControllerClient:"""Returns JobControllerClient."""iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationclient_options=Noneifregionandregion!='global':client_options={'api_endpoint':f'{region}-dataproc.googleapis.com:443'}returnJobControllerClient(credentials=self._get_credentials(),client_info=CLIENT_INFO,client_options=client_options
)
[docs]defget_batch_client(self,region:Optional[str]=None,location:Optional[str]=None)->BatchControllerClient:"""Returns BatchControllerClient"""iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationclient_options=Noneifregionandregion!='global':client_options={'api_endpoint':f'{region}-dataproc.googleapis.com:443'}returnBatchControllerClient(credentials=self._get_credentials(),client_info=CLIENT_INFO,client_options=client_options
)
[docs]defwait_for_operation(self,operation:Operation,timeout:Optional[float]=None):"""Waits for long-lasting operation to complete."""try:returnoperation.result(timeout=timeout)exceptException:error=operation.exception(timeout=timeout)raiseAirflowException(error)
@GoogleBaseHook.fallback_to_default_project_id
[docs]defcreate_cluster(self,region:str,project_id:str,cluster_name:str,cluster_config:Union[Dict,Cluster],labels:Optional[Dict[str,str]]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Creates a cluster in a project. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param cluster_name: Name of the cluster to create :param labels: Labels that will be assigned to created cluster :param cluster_config: Required. The cluster config to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1.types.ClusterConfig` :param request_id: Optional. A unique id used to identify the request. If the server receives two ``CreateClusterRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """# Dataproc labels must conform to the following regex:# [a-z]([-a-z0-9]*[a-z0-9])? 
(current airflow version string follows# semantic versioning spec: x.y.z).labels=labelsor{}labels.update({'airflow-version':'v'+airflow_version.replace('.','-').replace('+','-')})cluster={"project_id":project_id,"cluster_name":cluster_name,"config":cluster_config,"labels":labels,}client=self.get_cluster_client(region=region)result=client.create_cluster(request={'project_id':project_id,'region':region,'cluster':cluster,'request_id':request_id,},retry=retry,timeout=timeout,metadata=metadata,)returnresult
@GoogleBaseHook.fallback_to_default_project_id
[docs]defdelete_cluster(self,region:str,cluster_name:str,project_id:str,cluster_uuid:Optional[str]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Deletes a cluster in a project. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param cluster_name: Required. The cluster name. :param cluster_uuid: Optional. Specifying the ``cluster_uuid`` means the RPC should fail if cluster with specified UUID does not exist. :param request_id: Optional. A unique id used to identify the request. If the server receives two ``DeleteClusterRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """client=self.get_cluster_client(region=region)result=client.delete_cluster(request={'project_id':project_id,'region':region,'cluster_name':cluster_name,'cluster_uuid':cluster_uuid,'request_id':request_id,},retry=retry,timeout=timeout,metadata=metadata,)returnresult
@GoogleBaseHook.fallback_to_default_project_id
[docs]defdiagnose_cluster(self,region:str,cluster_name:str,project_id:str,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Gets cluster diagnostic information. After the operation completes GCS uri to diagnose is returned :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param cluster_name: Required. The cluster name. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """client=self.get_cluster_client(region=region)operation=client.diagnose_cluster(request={'project_id':project_id,'region':region,'cluster_name':cluster_name},retry=retry,timeout=timeout,metadata=metadata,)operation.result()gcs_uri=str(operation.operation.response.value)returngcs_uri
@GoogleBaseHook.fallback_to_default_project_id
[docs]defget_cluster(self,region:str,cluster_name:str,project_id:str,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Gets the resource representation for a cluster in a project. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param cluster_name: Required. The cluster name. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """client=self.get_cluster_client(region=region)result=client.get_cluster(request={'project_id':project_id,'region':region,'cluster_name':cluster_name},retry=retry,timeout=timeout,metadata=metadata,)returnresult
@GoogleBaseHook.fallback_to_default_project_id
[docs]deflist_clusters(self,region:str,filter_:str,project_id:str,page_size:Optional[int]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Lists all regions/{region}/clusters in a project. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param filter_: Optional. A filter constraining the clusters to list. Filters are case-sensitive. :param page_size: The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """client=self.get_cluster_client(region=region)result=client.list_clusters(request={'project_id':project_id,'region':region,'filter':filter_,'page_size':page_size},retry=retry,timeout=timeout,metadata=metadata,)returnresult
@GoogleBaseHook.fallback_to_default_project_id
[docs]defupdate_cluster(self,cluster_name:str,cluster:Union[Dict,Cluster],update_mask:Union[Dict,FieldMask],project_id:str,region:Optional[str]=None,location:Optional[str]=None,graceful_decommission_timeout:Optional[Union[Dict,Duration]]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Updates a cluster in a project. :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param cluster_name: Required. The cluster name. :param cluster: Required. The changes to the cluster. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1.types.Cluster` :param update_mask: Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter would be specified as ``config.worker_config.num_instances``, and the ``PATCH`` request body would specify the new value, as follows: :: { "config":{ "workerConfig":{ "numInstances":"5" } } } Similarly, to change the number of preemptible workers in a cluster to 5, the ``update_mask`` parameter would be ``config.secondary_worker_config.num_instances``, and the ``PATCH`` request body would be set as follows: :: { "config":{ "secondaryWorkerConfig":{ "numInstances":"5" } } } If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1.types.FieldMask` :param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. 
Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day. Only supported on Dataproc image versions 1.2 and higher. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1.types.Duration` :param request_id: Optional. A unique id used to identify the request. If the server receives two ``UpdateClusterRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")client=self.get_cluster_client(region=region)operation=client.update_cluster(request={'project_id':project_id,'region':region,'cluster_name':cluster_name,'cluster':cluster,'update_mask':update_mask,'graceful_decommission_timeout':graceful_decommission_timeout,'request_id':request_id,},retry=retry,timeout=timeout,metadata=metadata,)returnoperation
@GoogleBaseHook.fallback_to_default_project_id
[docs]defcreate_workflow_template(self,template:Union[Dict,WorkflowTemplate],project_id:str,region:Optional[str]=None,location:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),)->WorkflowTemplate:""" Creates new workflow template. :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param template: The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf message WorkflowTemplate. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")metadata=metadataor()client=self.get_template_client(region)parent=f'projects/{project_id}/regions/{region}'returnclient.create_workflow_template(request={'parent':parent,'template':template},retry=retry,timeout=timeout,metadata=metadata
)@GoogleBaseHook.fallback_to_default_project_id
[docs]definstantiate_workflow_template(self,template_name:str,project_id:str,region:Optional[str]=None,location:Optional[str]=None,version:Optional[int]=None,request_id:Optional[str]=None,parameters:Optional[Dict[str,str]]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Instantiates a template and begins execution. :param template_name: Name of template to instantiate. :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param version: Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. This option cannot be used to instantiate a previous version of workflow template. :param request_id: Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. :param parameters: Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. 
""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")metadata=metadataor()client=self.get_template_client(region)name=f'projects/{project_id}/regions/{region}/workflowTemplates/{template_name}'operation=client.instantiate_workflow_template(request={'name':name,'version':version,'request_id':request_id,'parameters':parameters},retry=retry,timeout=timeout,metadata=metadata,)returnoperation
@GoogleBaseHook.fallback_to_default_project_id
[docs]definstantiate_inline_workflow_template(self,template:Union[Dict,WorkflowTemplate],project_id:str,region:Optional[str]=None,location:Optional[str]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Instantiates a template and begins execution. :param template: The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf message WorkflowTemplate :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param request_id: Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")metadata=metadataor()client=self.get_template_client(region)parent=f'projects/{project_id}/regions/{region}'operation=client.instantiate_inline_workflow_template(request={'parent':parent,'template':template,'request_id':request_id},retry=retry,timeout=timeout,metadata=metadata,)returnoperation
@GoogleBaseHook.fallback_to_default_project_id
[docs]defwait_for_job(self,job_id:str,project_id:str,wait_time:int=10,region:Optional[str]=None,location:Optional[str]=None,timeout:Optional[int]=None,)->None:""" Helper method which polls a job to check if it finishes. :param job_id: Id of the Dataproc job :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param wait_time: Number of seconds between checks :param timeout: How many seconds wait for job to be ready. Used only if ``asynchronous`` is False """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")state=Nonestart=time.monotonic()whilestatenotin(JobStatus.State.ERROR,JobStatus.State.DONE,JobStatus.State.CANCELLED):iftimeoutandstart+timeout<time.monotonic():raiseAirflowException(f"Timeout: dataproc job {job_id} is not ready after {timeout}s")time.sleep(wait_time)try:job=self.get_job(project_id=project_id,region=region,job_id=job_id)state=job.status.stateexceptServerErroraserr:self.log.info("Retrying. Dataproc API returned server error when waiting for job: %s",err)ifstate==JobStatus.State.ERROR:raiseAirflowException(f'Job failed:\n{job}')ifstate==JobStatus.State.CANCELLED:raiseAirflowException(f'Job was cancelled:\n{job}')
@GoogleBaseHook.fallback_to_default_project_id
[docs]defget_job(self,job_id:str,project_id:str,region:Optional[str]=None,location:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),)->Job:""" Gets the resource representation for a job in a project. :param job_id: Id of the Dataproc job :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")client=self.get_job_client(region=region)job=client.get_job(request={'project_id':project_id,'region':region,'job_id':job_id},retry=retry,timeout=timeout,metadata=metadata,)returnjob
@GoogleBaseHook.fallback_to_default_project_id
[docs]defsubmit_job(self,job:Union[dict,Job],project_id:str,region:Optional[str]=None,location:Optional[str]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),)->Job:""" Submits a job to a cluster. :param job: The job resource. If a dict is provided, it must be of the same form as the protobuf message Job :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param request_id: Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationelse:raiseTypeError("missing 1 required keyword argument: 'region'")client=self.get_job_client(region=region)returnclient.submit_job(request={'project_id':project_id,'region':region,'job':job,'request_id':request_id},retry=retry,timeout=timeout,metadata=metadata,
)
[docs]defsubmit(self,project_id:str,job:dict,region:str='global',job_error_states:Optional[Iterable[str]]=None,)->None:""" Submits Google Cloud Dataproc job. :param project_id: The id of Google Cloud Dataproc project. :param job: The job to be submitted :param region: The region of Google Dataproc cluster. :param job_error_states: Job states that should be considered error states. """# TODO: Remover one daywarnings.warn("This method is deprecated. Please use `submit_job`",DeprecationWarning,stacklevel=2)job_object=self.submit_job(region=region,project_id=project_id,job=job)job_id=job_object.reference.job_idself.wait_for_job(job_id=job_id,region=region,project_id=project_id)
@GoogleBaseHook.fallback_to_default_project_id
[docs]defcancel_job(self,job_id:str,project_id:str,region:Optional[str]=None,location:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),)->Job:""" Starts a job cancellation request. :param project_id: Required. The ID of the Google Cloud project that the job belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param location: (To be deprecated). The Cloud Dataproc region in which to handle the request. :param job_id: Required. The job ID. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """ifregionisNone:iflocationisnotNone:warnings.warn("Parameter `location` will be deprecated. ""Please provide value through `region` parameter instead.",DeprecationWarning,stacklevel=2,)region=locationifregionisNone:warnings.warn("Default region value `global` will be deprecated. Please, provide region value.",DeprecationWarning,stacklevel=2,)region='global'client=self.get_job_client(region=region)job=client.cancel_job(request={'project_id':project_id,'region':region,'job_id':job_id},retry=retry,timeout=timeout,metadata=metadata,)returnjob
@GoogleBaseHook.fallback_to_default_project_id
[docs]defcreate_batch(self,region:str,project_id:str,batch:Union[Dict,Batch],batch_id:Optional[str]=None,request_id:Optional[str]=None,retry:Optional[Retry]=None,timeout:Optional[float]=None,metadata:Sequence[Tuple[str,str]]=(),):""" Creates a batch workload. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. :param batch: Required. The batch to create. :param batch_id: Optional. The ID to use for the batch, which will become the final component of the batch's resource name. This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/. :param request_id: Optional. A unique id used to identify the request. If the server receives two ``CreateBatchRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be retried. :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. :param metadata: Additional metadata that is provided to the method. """client=self.get_batch_client(region)parent=f'projects/{project_id}/regions/{region}'result=client.create_batch(request={'parent':parent,'batch':batch,'batch_id':batch_id,'request_id':request_id,},retry=retry,timeout=timeout,metadata=metadata,)returnresult
@GoogleBaseHook.fallback_to_default_project_id
def delete_batch(
    self,
    batch_id: str,
    region: str,
    project_id: str,
    retry: Optional[Retry] = None,
    timeout: Optional[float] = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> None:
    """
    Deletes the batch workload resource.

    :param batch_id: Required. The ID to use for the batch, which will become the final component
        of the batch's resource name. This value must be 4-63 characters. Valid characters
        are /[a-z][0-9]-/.
    :param region: Required. The Cloud Dataproc region in which to handle the request.
    :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to.
    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not
        be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
        ``retry`` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    """
    batch_name = f"projects/{project_id}/regions/{region}/batches/{batch_id}"
    batch_client = self.get_batch_client(region)
    batch_client.delete_batch(
        request={'name': batch_name},
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
@GoogleBaseHook.fallback_to_default_project_id
def get_batch(
    self,
    batch_id: str,
    region: str,
    project_id: str,
    retry: Optional[Retry] = None,
    timeout: Optional[float] = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> Batch:
    """
    Gets the batch workload resource representation.

    :param batch_id: Required. The ID to use for the batch, which will become the final component
        of the batch's resource name. This value must be 4-63 characters. Valid characters
        are /[a-z][0-9]-/.
    :param region: Required. The Cloud Dataproc region in which to handle the request.
    :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to.
    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not
        be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
        ``retry`` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    """
    batch_name = f"projects/{project_id}/regions/{region}/batches/{batch_id}"
    batch_client = self.get_batch_client(region)
    return batch_client.get_batch(
        request={'name': batch_name},
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
@GoogleBaseHook.fallback_to_default_project_id
def list_batches(
    self,
    region: str,
    project_id: str,
    page_size: Optional[int] = None,
    page_token: Optional[str] = None,
    retry: Optional[Retry] = None,
    timeout: Optional[float] = None,
    metadata: Sequence[Tuple[str, str]] = (),
):
    """
    Lists batch workloads.

    :param region: Required. The Cloud Dataproc region in which to handle the request.
    :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to.
    :param page_size: Optional. The maximum number of batches to return in each response. The service
        may return fewer than this value. The default page size is 20; the maximum page size is 1000.
    :param page_token: Optional. A page token received from a previous ``ListBatches`` call.
        Provide this token to retrieve the subsequent page.
    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not
        be retried.
    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
        ``retry`` is specified, the timeout applies to each individual attempt.
    :param metadata: Additional metadata that is provided to the method.
    """
    list_request = {
        'parent': f'projects/{project_id}/regions/{region}',
        'page_size': page_size,
        'page_token': page_token,
    }
    batch_client = self.get_batch_client(region)
    return batch_client.list_batches(
        request=list_request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )