Example Airflow DAG for DataprocSubmitJobOperator with a PySpark job.
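The pattern this example demonstrates is building a Dataproc job dict whose pyspark_job section points at a main Python file in GCS, then handing it to DataprocSubmitJobOperator. A minimal sketch with hypothetical placeholder values (the real module derives these from environment variables, as the listing below shows):

```python
from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

# Placeholder values for illustration only; not the module's exact ones.
PROJECT_ID = "my-gcp-project"
REGION = "europe-west1"
CLUSTER_NAME = "cluster-dataproc-pyspark"
GCS_JOB_FILE = "gs://airflow-system-tests-resources/dataproc/pyspark/dataproc-pyspark-job-pi.py"

# A Dataproc job spec: which project/cluster to run on and which
# PySpark main file to execute.
PYSPARK_JOB = {
    "reference": {"project_id": PROJECT_ID},
    "placement": {"cluster_name": CLUSTER_NAME},
    "pyspark_job": {"main_python_file_uri": GCS_JOB_FILE},
}

pyspark_task = DataprocSubmitJobOperator(
    task_id="pyspark_task",
    job=PYSPARK_JOB,
    region=REGION,
    project_id=PROJECT_ID,
)
```

The job dict follows the Dataproc Job resource shape, so the same pattern extends to other job types (spark_job, hadoop_job, and so on) by swapping the pyspark_job section.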
Module Contents
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.ENV_ID[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.DAG_ID = 'dataproc_pyspark'[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.PROJECT_ID[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_NAME_BASE[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_NAME_FULL[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_NAME[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.BUCKET_NAME[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.RESOURCE_DATA_BUCKET = 'airflow-system-tests-resources'[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.JOB_FILE = 'dataproc-pyspark-job-pi.py'[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.GCS_JOB_FILE[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.REGION = 'europe-west1'[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_CONFIG[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.PYSPARK_JOB[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.create_bucket[source]
- tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.test_run[source]
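Taken together, the names above follow the usual layout of these Dataproc system-test modules: derive identifiers from environment variables, stage the job file in a shared resources bucket, and wire create/submit/teardown tasks into one DAG. A hedged sketch of how they plausibly fit together; the CLUSTER_CONFIG machine shapes, the cluster-name length cutoff, and every task except create_bucket are assumptions, not the module's confirmed values:

```python
import os
from datetime import datetime

from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.dataproc import (
    DataprocCreateClusterOperator,
    DataprocDeleteClusterOperator,
    DataprocSubmitJobOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator
from airflow.utils.trigger_rule import TriggerRule

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "dataproc_pyspark"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")

# Dataproc cluster names must be short and hyphen-safe; the BASE/FULL pair
# in the listing suggests the full name is used only when it fits
# (the exact length cutoff here is a guess).
CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else CLUSTER_NAME_FULL

BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
JOB_FILE = "dataproc-pyspark-job-pi.py"
# Assumed GCS layout: the job file lives in the shared resources bucket.
GCS_JOB_FILE = f"gs://{RESOURCE_DATA_BUCKET}/dataproc/pyspark/{JOB_FILE}"
REGION = "europe-west1"

# Illustrative cluster shape; the real CLUSTER_CONFIG may differ.
CLUSTER_CONFIG = {
    "master_config": {
        "num_instances": 1,
        "machine_type_uri": "n1-standard-4",
        "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
    },
    "worker_config": {
        "num_instances": 2,
        "machine_type_uri": "n1-standard-4",
        "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
    },
}

PYSPARK_JOB = {
    "reference": {"project_id": PROJECT_ID},
    "placement": {"cluster_name": CLUSTER_NAME},
    "pyspark_job": {"main_python_file_uri": GCS_JOB_FILE},
}

with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example", "dataproc", "pyspark"],
) as dag:
    create_bucket = GCSCreateBucketOperator(
        task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
    )
    create_cluster = DataprocCreateClusterOperator(
        task_id="create_cluster",
        project_id=PROJECT_ID,
        cluster_config=CLUSTER_CONFIG,
        region=REGION,
        cluster_name=CLUSTER_NAME,
    )
    pyspark_task = DataprocSubmitJobOperator(
        task_id="pyspark_task", job=PYSPARK_JOB, region=REGION, project_id=PROJECT_ID
    )
    # Teardown runs regardless of job outcome so the test never leaks clusters.
    delete_cluster = DataprocDeleteClusterOperator(
        task_id="delete_cluster",
        project_id=PROJECT_ID,
        cluster_name=CLUSTER_NAME,
        region=REGION,
        trigger_rule=TriggerRule.ALL_DONE,
    )
    create_bucket >> create_cluster >> pyspark_task >> delete_cluster
```

The test_run attribute in the listing is typically produced by the repository's pytest helper, along the lines of test_run = get_test_run(dag) from tests.system.utils, which is what lets this DAG run under pytest as well as under the scheduler.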