airflow.contrib.hooks.gcp_dataflow_hook

Module Contents

airflow.contrib.hooks.gcp_dataflow_hook.DEFAULT_DATAFLOW_LOCATION = 'us-central1'
class airflow.contrib.hooks.gcp_dataflow_hook._DataflowJob(dataflow, project_number, name, location, poll_sleep=10, job_id=None, num_retries=None)

Bases: airflow.utils.log.logging_mixin.LoggingMixin

_get_job_id_from_name(self)
_get_job(self)
wait_for_done(self)
get(self)
class airflow.contrib.hooks.gcp_dataflow_hook._Dataflow(cmd)
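
_DataflowJob is an internal helper that looks up a submitted job by name and polls it until it reaches a terminal state. A minimal sketch of how it might be driven, assuming service is the client returned by DataFlowHook.get_conn() and all project and job names are hypothetical:

    job = _DataflowJob(
        dataflow=service,                 # Dataflow API client from get_conn()
        project_number='my-gcp-project',  # hypothetical GCP project ID
        name='example-wordcount',         # job name used to resolve the job ID
        location='us-central1',           # see DEFAULT_DATAFLOW_LOCATION above
        poll_sleep=10,                    # seconds between polls in wait_for_done()
    )
    job.wait_for_done()                   # blocks until the job finishes or fails
    result = job.get()                    # final job resource from the Dataflow API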

Bases: airflow.utils.log.logging_mixin.LoggingMixin

_line(self, fd)
static _extract_job(line)
wait_for_done(self)
class airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook(gcp_conn_id='google_cloud_default', delegate_to=None, poll_sleep=10)
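
_Dataflow is an internal wrapper around the subprocess that launches a pipeline (the command assembled by _build_cmd); it reads the process output line by line and tries to pick the Dataflow job ID out of the logs via _extract_job. A rough sketch of that internal flow, with a hypothetical command list:

    cmd = ['python', 'wordcount.py', '--runner=DataflowRunner',
           '--project=my-gcp-project']   # hypothetical launch command
    runner = _Dataflow(cmd)
    job_id = runner.wait_for_done()      # consumes output until the process exits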

Bases: airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook

get_conn(self)

Returns a Google Cloud Dataflow service object.
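
The returned object is a Google API discovery client for the Dataflow service, so the ordinary REST resources are available on it. A small example, with placeholder project and location values:

    hook = DataFlowHook(gcp_conn_id='google_cloud_default')
    service = hook.get_conn()
    jobs = service.projects().locations().jobs().list(
        projectId='my-gcp-project',
        location='us-central1',
    ).execute()                          # plain Dataflow v1b3 jobs.list call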

_start_dataflow(self, variables, name, command_prefix, label_formatter)
static _set_variables(variables)
start_java_dataflow(self, job_name, variables, dataflow, job_class=None, append_job_name=True)
start_template_dataflow(self, job_name, variables, parameters, dataflow_template, append_job_name=True)
start_python_dataflow(self, job_name, variables, dataflow, py_options, append_job_name=True)
static _build_dataflow_job_name(job_name, append_job_name=True)
static _build_cmd(variables, label_formatter)
_start_template_dataflow(self, name, variables, parameters, dataflow_template)
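
The three public start_* methods cover the common launch paths: a self-executing Java jar, an Apache Beam Python script, and a template stored in GCS. A minimal usage sketch, assuming a working google_cloud_default connection and hypothetical bucket, file, and class names:

    hook = DataFlowHook(gcp_conn_id='google_cloud_default', poll_sleep=10)

    # Java pipeline from a local self-executing jar (Beam Java options are camelCase).
    hook.start_java_dataflow(
        job_name='example-java-job',
        variables={'project': 'my-gcp-project',
                   'stagingLocation': 'gs://my-bucket/staging'},
        dataflow='/path/to/pipeline.jar',
        job_class='com.example.MyPipeline',
    )

    # Python pipeline from a local Beam script (options are snake_case).
    hook.start_python_dataflow(
        job_name='example-python-job',
        variables={'project': 'my-gcp-project',
                   'staging_location': 'gs://my-bucket/staging',
                   'temp_location': 'gs://my-bucket/temp'},
        dataflow='/path/to/wordcount.py',
        py_options=[],                   # extra interpreter flags, e.g. ['-m']
    )

    # Job launched from a Dataflow template stored in GCS.
    hook.start_template_dataflow(
        job_name='example-template-job',
        variables={'project': 'my-gcp-project', 'zone': 'us-central1-f'},
        parameters={'inputFile': 'gs://my-bucket/input.txt',
                    'output': 'gs://my-bucket/output'},
        dataflow_template='gs://dataflow-templates/latest/Word_Count',
    )

Each call blocks until the launched job reaches a terminal state, polling every poll_sleep seconds through the internal _DataflowJob helper.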
