Example Airflow DAG for DataprocSubmitJobOperator with a Hadoop job.
Module Contents
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.ENV_ID[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.DAG_ID = 'dataproc_hadoop'[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.PROJECT_ID[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.BUCKET_NAME[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME_BASE[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME_FULL[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.REGION = 'europe-west1'[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.OUTPUT_FOLDER = 'wordcount'[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.OUTPUT_PATH[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_CONFIG[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.HADOOP_JOB[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.create_bucket[source]
- tests.system.google.cloud.dataproc.example_dataproc_hadoop.test_run[source]