Commit b0f7f91

Standardise dataproc location param to region (#16034)

* Standardise dataproc location param to region

  Standardises the Dataproc hook & operators' `location` parameter to `region`,
  in line with the underlying Google Dataproc Python client library.

* Add back `location` parameter for backward compatibility
* Fix test
* Update airflow/providers/google/CHANGELOG.rst

Co-authored-by: Jarek Potiuk <[email protected]>

1 parent 5a5f30f commit b0f7f91
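The commit message says the old `location` parameter was added back for backward
compatibility. A minimal sketch of how such a keyword-rename shim typically
works: prefer the new name and emit a deprecation warning when only the old one
is given. The helper name `_resolve_region` and the warning text are
assumptions for illustration, not the actual Airflow code:

import warnings
from typing import Optional


def _resolve_region(region: Optional[str] = None, location: Optional[str] = None) -> Optional[str]:
    """Prefer the new ``region`` argument; fall back to the deprecated ``location``."""
    if region is not None:
        return region
    if location is not None:
        # Warn so callers migrate to the new parameter name.
        warnings.warn(
            "Parameter `location` is deprecated, please use `region` instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return location
    return None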

File tree: 8 files changed, +849 −159 lines changed

airflow/providers/google/CHANGELOG.rst

Lines changed: 0 additions & 1 deletion

@@ -15,7 +15,6 @@
 specific language governing permissions and limitations
 under the License.

-
 Changelog
 ---------

airflow/providers/google/cloud/example_dags/example_dataproc.py

Lines changed: 11 additions & 11 deletions

@@ -170,7 +170,7 @@
     update_mask=UPDATE_MASK,
     graceful_decommission_timeout=TIMEOUT,
     project_id=PROJECT_ID,
-    location=REGION,
+    region=REGION,
 )
 # [END how_to_cloud_dataproc_update_cluster_operator]

@@ -179,7 +179,7 @@
     task_id="create_workflow_template",
     template=WORKFLOW_TEMPLATE,
     project_id=PROJECT_ID,
-    location=REGION,
+    region=REGION,
 )
 # [END how_to_cloud_dataproc_create_workflow_template]

@@ -190,24 +190,24 @@
 # [END how_to_cloud_dataproc_trigger_workflow_template]

 pig_task = DataprocSubmitJobOperator(
-    task_id="pig_task", job=PIG_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="pig_task", job=PIG_JOB, region=REGION, project_id=PROJECT_ID
 )
 spark_sql_task = DataprocSubmitJobOperator(
-    task_id="spark_sql_task", job=SPARK_SQL_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="spark_sql_task", job=SPARK_SQL_JOB, region=REGION, project_id=PROJECT_ID
 )

 spark_task = DataprocSubmitJobOperator(
-    task_id="spark_task", job=SPARK_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="spark_task", job=SPARK_JOB, region=REGION, project_id=PROJECT_ID
 )

 # [START cloud_dataproc_async_submit_sensor]
 spark_task_async = DataprocSubmitJobOperator(
-    task_id="spark_task_async", job=SPARK_JOB, location=REGION, project_id=PROJECT_ID, asynchronous=True
+    task_id="spark_task_async", job=SPARK_JOB, region=REGION, project_id=PROJECT_ID, asynchronous=True
 )

 spark_task_async_sensor = DataprocJobSensor(
     task_id='spark_task_async_sensor_task',
-    location=REGION,
+    region=REGION,
     project_id=PROJECT_ID,
     dataproc_job_id="{{task_instance.xcom_pull(task_ids='spark_task_async')}}",
     poke_interval=10,

@@ -216,20 +216,20 @@

 # [START how_to_cloud_dataproc_submit_job_to_cluster_operator]
 pyspark_task = DataprocSubmitJobOperator(
-    task_id="pyspark_task", job=PYSPARK_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="pyspark_task", job=PYSPARK_JOB, region=REGION, project_id=PROJECT_ID
 )
 # [END how_to_cloud_dataproc_submit_job_to_cluster_operator]

 sparkr_task = DataprocSubmitJobOperator(
-    task_id="sparkr_task", job=SPARKR_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="sparkr_task", job=SPARKR_JOB, region=REGION, project_id=PROJECT_ID
 )

 hive_task = DataprocSubmitJobOperator(
-    task_id="hive_task", job=HIVE_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="hive_task", job=HIVE_JOB, region=REGION, project_id=PROJECT_ID
 )

 hadoop_task = DataprocSubmitJobOperator(
-    task_id="hadoop_task", job=HADOOP_JOB, location=REGION, project_id=PROJECT_ID
+    task_id="hadoop_task", job=HADOOP_JOB, region=REGION, project_id=PROJECT_ID
 )

 # [START how_to_cloud_dataproc_delete_cluster_operator]
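After this change, callers pass `region` where they previously passed
`location`. A minimal, self-contained usage sketch: the project, region, and
job config below are placeholder values for illustration, not taken from the
commit:

import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

PROJECT_ID = "my-project"    # placeholder
REGION = "europe-west1"      # placeholder
PIG_JOB = {
    "reference": {"project_id": PROJECT_ID},
    "placement": {"cluster_name": "my-cluster"},  # placeholder cluster
    "pig_job": {"query_list": {"queries": ["define sin HiveUDF('sin');"]}},
}

with DAG(
    "example_dataproc_region",
    start_date=datetime.datetime(2021, 6, 1),
    schedule_interval=None,
) as dag:
    pig_task = DataprocSubmitJobOperator(
        task_id="pig_task",
        job=PIG_JOB,
        region=REGION,       # formerly `location`
        project_id=PROJECT_ID,
    )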
