4 files changed, +18 −11 (file tree: tests/providers/amazon/aws/transfers)

File 1 of 4

@@ -94,10 +94,12 @@ def __init__(
         self.gzip = gzip
         self.acl_policy = acl_policy

+    def _check_inputs(self):
         if 's3://' in self.dest_key and self.dest_bucket is not None:
             raise TypeError('dest_bucket should be None when dest_key is provided as a full s3:// file path.')

     def execute(self, context):
+        self._check_inputs()
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         s3_hook.load_file(
             self.filename,
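This hunk defers input validation from construction to execution: the existing dest_key / dest_bucket check becomes the body of a new _check_inputs() helper, and execute() calls it before touching S3Hook. A minimal, framework-free sketch of the pattern follows; the class name, example paths, and the try/except at the end are illustrative, not the provider's real code.

# Minimal sketch of the relocated check, with no Airflow dependencies.
# Attribute names and the error message mirror the diff; everything else is illustrative.
class LocalToS3Sketch:
    def __init__(self, filename, dest_key, dest_bucket=None):
        self.filename = filename
        self.dest_key = dest_key
        self.dest_bucket = dest_bucket  # stored as-is; no validation here any more

    def _check_inputs(self):
        # Same rule as the diff: a full s3:// dest_key excludes dest_bucket.
        if 's3://' in self.dest_key and self.dest_bucket is not None:
            raise TypeError(
                'dest_bucket should be None when dest_key is provided as a full s3:// file path.'
            )

    def execute(self, context):
        self._check_inputs()  # validation now happens at run time
        # ...the real operator uploads via S3Hook.load_file(self.filename, ...) here


# Construction succeeds even with conflicting arguments;
# the TypeError only surfaces when execute() runs.
op = LocalToS3Sketch('/tmp/data.csv', 's3://bucket/key.csv', dest_bucket='bucket')
try:
    op.execute(None)
except TypeError as exc:
    print(exc)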
File 2 of 4

@@ -103,7 +103,8 @@ def __init__(
         self.gzip = gzip
         self.google_impersonation_chain = google_impersonation_chain

-        if dest_gcs and not gcs_object_is_directory(self.dest_gcs):
+    def _check_inputs(self) -> None:
+        if self.dest_gcs and not gcs_object_is_directory(self.dest_gcs):
             self.log.info(
                 'Destination Google Cloud Storage path is not a valid '
                 '"directory", define a path that ends with a slash "/" or '
@@ -114,6 +115,7 @@ def __init__(
         )

     def execute(self, context):
+        self._check_inputs()
         azure_fileshare_hook = AzureFileShareHook(self.azure_fileshare_conn_id)
         files = azure_fileshare_hook.list_files(
             share_name=self.share_name, directory_name=self.directory_name
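The same refactor is applied in this file: the dest_gcs "directory" check leaves __init__, becomes _check_inputs(), and execute() now runs it before listing the Azure file share (the AzureFileShareHook call suggests an Azure FileShare-to-GCS transfer operator, though the file path is not visible above). Below is a standalone sketch of the relocated check, assuming a stand-in for gcs_object_is_directory and an illustrative log message.

import logging

log = logging.getLogger(__name__)


def _is_gcs_directory(path: str) -> bool:
    # Stand-in for the provider's gcs_object_is_directory helper (assumption):
    # a bucket root or a path ending in '/' counts as a "directory".
    _, _, blob = path.removeprefix('gs://').partition('/')
    return blob == '' or blob.endswith('/')


class ToGCSSketch:
    def __init__(self, dest_gcs=None, gzip=False):
        self.dest_gcs = dest_gcs  # no longer validated at construction time
        self.gzip = gzip

    def _check_inputs(self) -> None:
        if self.dest_gcs and not _is_gcs_directory(self.dest_gcs):
            # Illustrative message; the diff only shows the start of the real one.
            log.info(
                'Destination Google Cloud Storage path is not a valid '
                '"directory"; use a path that ends with a slash "/".'
            )

    def execute(self, context):
        self._check_inputs()
        # ...the real operator lists the file share and uploads to GCS here


logging.basicConfig(level=logging.INFO)
ToGCSSketch(dest_gcs='gs://bucket/not-a-dir').execute(None)  # emits the info line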
File 3 of 4

@@ -147,7 +147,8 @@ def __init__(
         self.gzip = gzip
         self.google_impersonation_chain = google_impersonation_chain

-        if dest_gcs and not gcs_object_is_directory(self.dest_gcs):
+    def _check_inputs(self) -> None:
+        if self.dest_gcs and not gcs_object_is_directory(self.dest_gcs):
             self.log.info(
                 'Destination Google Cloud Storage path is not a valid '
                 '"directory", define a path that ends with a slash "/" or '
@@ -158,6 +159,7 @@ def __init__(
         )

     def execute(self, context):
+        self._check_inputs()
         # use the super method to list all the files in an S3 bucket/key
         files = super().execute(context)
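This operator (judging by super().execute(context) returning a file listing and the S3 comment, likely an S3-to-GCS transfer, though the path is not shown) gets the identical treatment: execute() validates first, then delegates the listing to the parent class. A small sketch of that validate-then-delegate shape; ListS3Sketch, S3ToGCSSketch, and the returned keys are hypothetical, and only the call order mirrors the diff.

class ListS3Sketch:
    def execute(self, context):
        # The real parent operator lists all files under an S3 bucket/key.
        return ['data/part-0001.csv', 'data/part-0002.csv']


class S3ToGCSSketch(ListS3Sketch):
    def __init__(self, dest_gcs):
        self.dest_gcs = dest_gcs

    def _check_inputs(self) -> None:
        if self.dest_gcs and not self.dest_gcs.endswith('/'):
            print('dest_gcs should end with "/" to denote a directory')  # illustrative

    def execute(self, context):
        self._check_inputs()              # run-time validation first...
        files = super().execute(context)  # ...then the inherited listing step
        return files


print(S3ToGCSSketch('gs://bucket/prefix/').execute(None))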
File 4 of 4

@@ -61,16 +61,17 @@ def test_init(self):
         assert operator.encrypt == self._config['encrypt']
         assert operator.gzip == self._config['gzip']

-    def test_init_exception(self):
+    def test_execute_exception(self):
+        operator = LocalFilesystemToS3Operator(
+            task_id='file_to_s3_operatro_exception',
+            dag=self.dag,
+            filename=self.testfile1,
+            dest_key=f's3://dummy/{self.dest_key}',
+            dest_bucket=self.dest_bucket,
+            **self._config,
+        )
         with self.assertRaises(TypeError):
-            LocalFilesystemToS3Operator(
-                task_id='file_to_s3_operatro_exception',
-                dag=self.dag,
-                filename=self.testfile1,
-                dest_key=f's3://dummy/{self.dest_key}',
-                dest_bucket=self.dest_bucket,
-                **self._config,
-            )
+            operator.execute(None)

     @mock_s3
     def test_execute(self):
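The test is renamed and reshaped to match the new behavior: the operator is constructed up front (construction no longer raises), and assertRaises wraps execute(None) instead of the constructor call. A self-contained unittest sketch of that shape, using a trivial stand-in class rather than LocalFilesystemToS3Operator and the mock_s3 fixture:

import unittest


class OperatorStandIn:
    # Trivial stand-in mirroring the validation the real operator now performs
    # in execute(); not the Airflow class itself.
    def __init__(self, dest_key, dest_bucket=None):
        self.dest_key = dest_key
        self.dest_bucket = dest_bucket

    def execute(self, context):
        if 's3://' in self.dest_key and self.dest_bucket is not None:
            raise TypeError('dest_bucket should be None when dest_key is a full s3:// file path.')


class TestExecuteException(unittest.TestCase):
    def test_execute_exception(self):
        # Construction succeeds...
        operator = OperatorStandIn(dest_key='s3://dummy/key', dest_bucket='dummy')
        # ...and the TypeError is asserted on execute(), as in the updated test.
        with self.assertRaises(TypeError):
            operator.execute(None)


if __name__ == '__main__':
    unittest.main()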