Skip to content

Commit cb90ca7

Browse files
authored
Migrate Google sheets example DAG to new design AIP-47 (#24351)
related: #22430, #22447
1 parent 19dd9f5 commit cb90ca7

File tree

3 files changed

+39
-45
lines changed

3 files changed

+39
-45
lines changed

docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS
3838
To upload data from Google Spreadsheet to Google Cloud Storage you can use the
3939
:class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`.
4040

41-
.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
41+
.. exampleinclude:: /../../tests/system/providers/google/sheets/example_sheets_to_gcs.py
4242
:language: python
4343
:dedent: 4
4444
:start-after: [START upload_sheet_to_gcs]

tests/providers/google/cloud/transfers/test_sheets_to_gcs_system.py

Lines changed: 0 additions & 40 deletions
This file was deleted.

airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py renamed to tests/system/providers/google/sheets/example_sheets_to_gcs.py

Lines changed: 38 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,22 +20,56 @@
2020
from datetime import datetime
2121

2222
from airflow import models
23+
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
2324
from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
25+
from airflow.utils.trigger_rule import TriggerRule
2426

25-
BUCKET = os.environ.get("GCP_GCS_BUCKET", "test28397yeo")
27+
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
28+
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
29+
DAG_ID = "example_sheets_to_gcs"
30+
31+
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
2632
SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "1234567890qwerty")
2733

2834
with models.DAG(
29-
"example_sheets_to_gcs",
35+
DAG_ID,
3036
schedule_interval='@once', # Override to match your needs
3137
start_date=datetime(2021, 1, 1),
3238
catchup=False,
33-
tags=["example"],
39+
tags=["example", "sheets"],
3440
) as dag:
41+
create_bucket = GCSCreateBucketOperator(
42+
task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
43+
)
44+
3545
# [START upload_sheet_to_gcs]
3646
upload_sheet_to_gcs = GoogleSheetsToGCSOperator(
3747
task_id="upload_sheet_to_gcs",
38-
destination_bucket=BUCKET,
48+
destination_bucket=BUCKET_NAME,
3949
spreadsheet_id=SPREADSHEET_ID,
4050
)
4151
# [END upload_sheet_to_gcs]
52+
53+
delete_bucket = GCSDeleteBucketOperator(
54+
task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
55+
)
56+
57+
(
58+
# TEST SETUP
59+
create_bucket
60+
# TEST BODY
61+
>> upload_sheet_to_gcs
62+
# TEST TEARDOWN
63+
>> delete_bucket
64+
)
65+
66+
from tests.system.utils.watcher import watcher
67+
68+
# This test needs watcher in order to properly mark success/failure
69+
# when "tearDown" task with trigger rule is part of the DAG
70+
list(dag.tasks) >> watcher()
71+
72+
from tests.system.utils import get_test_run # noqa: E402
73+
74+
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
75+
test_run = get_test_run(dag)

0 commit comments

Comments (0)