tests.system.google.cloud.dataproc.example_dataproc_hadoop

Example Airflow DAG for DataprocSubmitJobOperator with a Hadoop job.

Attributes

ENV_ID

DAG_ID

PROJECT_ID

BUCKET_NAME

CLUSTER_NAME_BASE

CLUSTER_NAME_FULL

CLUSTER_NAME

REGION

OUTPUT_FOLDER

OUTPUT_PATH

CLUSTER_CONFIG

HADOOP_JOB

create_bucket

test_run

Module Contents

tests.system.google.cloud.dataproc.example_dataproc_hadoop.ENV_ID[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.DAG_ID = 'dataproc_hadoop'[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.PROJECT_ID[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.BUCKET_NAME = 'bucket_dataproc_hadoop_Uninferable'[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME_BASE = ''[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME_FULL = ''[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_NAME = ''[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.REGION = 'europe-west1'[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.OUTPUT_FOLDER = 'wordcount'[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.OUTPUT_PATH = 'gs://bucket_dataproc_hadoop_Uninferable/wordcount/'[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.CLUSTER_CONFIG[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.HADOOP_JOB[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.create_bucket[source]
tests.system.google.cloud.dataproc.example_dataproc_hadoop.test_run[source]

Was this entry helpful?