Example Airflow DAG for DataprocSubmitJobOperator with a PySpark job.
Module Contents
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.ENV_ID[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.DAG_ID = 'dataproc_pyspark'[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.PROJECT_ID[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.JOB_FILE_URI = 'gs://airflow-system-tests-resources/dataproc/pyspark/dataproc-pyspark-job-pi.py'[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_NAME[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.REGION = 'europe-west1'[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.CLUSTER_CONFIG[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.PYSPARK_JOB[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.create_cluster[source]
-
tests.system.providers.google.cloud.dataproc.example_dataproc_pyspark.test_run[source]