Commit 2d10940

Bump pre-commit hook versions (#22887)
1 parent 5b1ab96 commit 2d10940

22 files changed: +65 -97 lines changed

.pre-commit-config.yaml

Lines changed: 6 additions & 5 deletions
@@ -39,7 +39,7 @@ repos:
          - "--maxlevel"
          - "2"
  - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.1.10
+    rev: v1.1.13
    hooks:
      - id: forbid-tabs
        name: Fail if tabs are used in the project
@@ -146,6 +146,7 @@ repos:
          - --fuzzy-match-generates-todo
        files: >
          \.cfg$|\.conf$|\.ini$|\.ldif$|\.properties$|\.readthedocs$|\.service$|\.tf$|Dockerfile.*$
+  # Keep version of black in sync wit blackend-docs and pre-commit-hook-names
  - repo: https://github.com/psf/black
    rev: 22.3.0
    hooks:
@@ -161,7 +162,7 @@ repos:
        alias: black
        additional_dependencies: [black==22.3.0]
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.1.0
+    rev: v4.2.0
    hooks:
      - id: check-merge-conflict
        name: Check that merge conflicts are not being committed
@@ -203,7 +204,7 @@ repos:
        pass_filenames: true
  # TODO: Bump to Python 3.8 when support for Python 3.7 is dropped in Airflow.
  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.31.0
+    rev: v2.32.0
    hooks:
      - id: pyupgrade
        name: Upgrade Python code automatically
@@ -264,7 +265,7 @@ repos:
          ^airflow/_vendor/
        additional_dependencies: ['flake8>=4.0.1']
  - repo: https://github.com/ikamensh/flynt
-    rev: '0.69'
+    rev: '0.76'
    hooks:
      - id: flynt
        name: Run flynt string format converter for Python
@@ -546,7 +547,7 @@ repos:
      - id: run-shellcheck
        name: Check Shell scripts syntax correctness
        language: docker_image
-        entry: koalaman/shellcheck:v0.7.2 -x -a
+        entry: koalaman/shellcheck:v0.8.0 -x -a
        files: ^breeze-legacy$|^breeze-complete$|\.sh$|^hooks/build$|^hooks/push$|\.bash$
        exclude: ^dev/breeze/autocomplete/.*$
      - id: lint-css
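
The pyupgrade and flynt bumps matter beyond housekeeping: both tools rewrite old-style str.format() calls into f-strings, which is presumably what drives the Python changes in the rest of this commit. A minimal sketch of that class of rewrite, using made-up variable names and not tied to any Airflow file, showing that both forms render the same text:

# Sketch only: the .format() call is the style being removed; the f-string
# is the equivalent form the rewrite produces.
hook_name, failures = "forbid-tabs", 3
old_style = 'hook {} failed {} times'.format(hook_name, failures)
new_style = f'hook {hook_name} failed {failures} times'
assert old_style == new_style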

airflow/cli/commands/connection_command.py

Lines changed: 2 additions & 6 deletions
@@ -270,12 +270,8 @@ def connections_add(args):
        or urlunparse(
            (
                new_conn.conn_type,
-                '{login}:{password}@{host}:{port}'.format(
-                    login=new_conn.login or '',
-                    password='******' if new_conn.password else '',
-                    host=new_conn.host or '',
-                    port=new_conn.port or '',
-                ),
+                f"{new_conn.login or ''}:{'******' if new_conn.password else ''}"
+                f"@{new_conn.host or ''}:{new_conn.port or ''}",
                new_conn.schema or '',
                '',
                ''
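
The new f-string keeps the password-masking behaviour of the old format() call. A small standalone sketch of the netloc it builds, using hypothetical field values rather than Airflow's Connection object:

# Hypothetical connection fields, for illustration only.
login, password, host, port = 'admin', 'hunter2', 'example.com', 5432
netloc = (
    f"{login or ''}:{'******' if password else ''}"
    f"@{host or ''}:{port or ''}"
)
print(netloc)  # admin:******@example.com:5432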

airflow/example_dags/example_branch_python_dop_operator_3.py

Lines changed: 1 addition & 3 deletions
@@ -36,9 +36,7 @@ def should_run(**kwargs):
    :rtype: str
    """
    print(
-        '------------- exec dttm = {} and minute = {}'.format(
-            kwargs['execution_date'], kwargs['execution_date'].minute
-        )
+        f"------------- exec dttm = {kwargs['execution_date']} and minute = {kwargs['execution_date'].minute}"
    )
    if kwargs['execution_date'].minute % 2 == 0:
        return "empty_task_1"

airflow/operators/subdag.py

Lines changed: 3 additions & 8 deletions
@@ -115,14 +115,9 @@ def _validate_pool(self, session):
        pool = session.query(Pool).filter(Pool.slots == 1).filter(Pool.pool == self.pool).first()
        if pool and any(t.pool == self.pool for t in self.subdag.tasks):
            raise AirflowException(
-                'SubDagOperator {sd} and subdag task{plural} {t} both '
-                'use pool {p}, but the pool only has 1 slot. The '
-                'subdag tasks will never run.'.format(
-                    sd=self.task_id,
-                    plural=len(conflicts) > 1,
-                    t=', '.join(t.task_id for t in conflicts),
-                    p=self.pool,
-                )
+                f"SubDagOperator {self.task_id} and subdag task{'s' if len(conflicts) > 1 else ''} "
+                f"{', '.join(t.task_id for t in conflicts)} both use pool {self.pool}, "
+                f"but the pool only has 1 slot. The subdag tasks will never run."
            )

    def _get_dagrun(self, execution_date):
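
Besides the f-string conversion, this hunk fixes the message's pluralization: the old call interpolated the bare boolean len(conflicts) > 1 into 'task{plural}', producing "taskTrue" or "taskFalse", while the new conditional expression produces "task" or "tasks". A quick standalone comparison with a hypothetical conflicts list:

# Hypothetical list of conflicting task ids.
conflicts = ['child_a', 'child_b']
old = 'task{plural}'.format(plural=len(conflicts) > 1)   # 'taskTrue'
new = f"task{'s' if len(conflicts) > 1 else ''}"         # 'tasks'
print(old, new)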

airflow/providers/amazon/aws/operators/ecs.py

Lines changed: 4 additions & 6 deletions
@@ -467,9 +467,8 @@ def _check_success_task(self) -> None:
            # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/stopped-task-errors.html
            if re.match(r'Host EC2 \(instance .+?\) (stopped|terminated)\.', task.get('stoppedReason', '')):
                raise AirflowException(
-                    'The task was stopped because the host instance terminated: {}'.format(
-                        task.get('stoppedReason', '')
-                    )
+                    f"The task was stopped because the host instance terminated:"
+                    f" {task.get('stoppedReason', '')}"
                )
            containers = task['containers']
            for container in containers:
@@ -488,9 +487,8 @@ def _check_success_task(self) -> None:
                    raise AirflowException(f'This task is still pending {task}')
                elif 'error' in container.get('reason', '').lower():
                    raise AirflowException(
-                        'This containers encounter an error during launching : {}'.format(
-                            container.get('reason', '').lower()
-                        )
+                        f"This containers encounter an error during launching: "
+                        f"{container.get('reason', '').lower()}"
                    )

    def get_hook(self) -> AwsBaseHook:

airflow/providers/amazon/aws/sensors/emr.py

Lines changed: 6 additions & 4 deletions
@@ -258,8 +258,9 @@ def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
        cluster_status = response['Cluster']['Status']
        state_change_reason = cluster_status.get('StateChangeReason')
        if state_change_reason:
-            return 'for code: {} with message {}'.format(
-                state_change_reason.get('Code', 'No code'), state_change_reason.get('Message', 'Unknown')
+            return (
+                f"for code: {state_change_reason.get('Code', 'No code')} "
+                f"with message {state_change_reason.get('Message', 'Unknown')}"
            )
        return None

@@ -338,7 +339,8 @@ def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
        """
        fail_details = response['Step']['Status'].get('FailureDetails')
        if fail_details:
-            return 'for reason {} with message {} and log file {}'.format(
-                fail_details.get('Reason'), fail_details.get('Message'), fail_details.get('LogFile')
+            return (
+                f"for reason {fail_details.get('Reason')} "
+                f"with message {fail_details.get('Message')} and log file {fail_details.get('LogFile')}"
            )
        return None
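
Both hunks lean on Python's implicit concatenation of adjacent string literals: the parenthesized f-strings are joined by the parser, so the sensor still returns a single message string. A minimal sketch of the pattern with hypothetical values:

# Adjacent literals inside the parentheses are merged into one string.
code, message = 'VALIDATION_ERROR', 'Cluster failed to start'
reason = (
    f"for code: {code} "
    f"with message {message}"
)
print(reason)  # for code: VALIDATION_ERROR with message Cluster failed to start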

airflow/providers/apache/hive/operators/hive_stats.py

Lines changed: 3 additions & 2 deletions
@@ -76,8 +76,9 @@ def __init__(
    ) -> None:
        if 'col_blacklist' in kwargs:
            warnings.warn(
-                'col_blacklist kwarg passed to {c} (task_id: {t}) is deprecated, please rename it to '
-                'excluded_columns instead'.format(c=self.__class__.__name__, t=kwargs.get('task_id')),
+                f"col_blacklist kwarg passed to {self.__class__.__name__} "
+                f"(task_id: {kwargs.get('task_id')}) is deprecated, "
+                f"please rename it to excluded_columns instead",
                category=FutureWarning,
                stacklevel=2,
            )

airflow/providers/dingding/example_dags/example_dingding.py

Lines changed: 4 additions & 6 deletions
@@ -32,12 +32,10 @@ def failure_callback(context):
    :param context: The context of the executed task.
    """
    message = (
-        'AIRFLOW TASK FAILURE TIPS:\n'
-        'DAG: {}\n'
-        'TASKS: {}\n'
-        'Reason: {}\n'.format(
-            context['task_instance'].dag_id, context['task_instance'].task_id, context['exception']
-        )
+        f"AIRFLOW TASK FAILURE TIPS:\n"
+        f"DAG: {context['task_instance'].dag_id}\n"
+        f"TASKS: {context['task_instance'].task_id}\n"
+        f"Reason: {context['exception']}\n"
    )
    return DingdingOperator(
        task_id='dingding_success_callback',

airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py

Lines changed: 2 additions & 3 deletions
@@ -495,8 +495,7 @@ def operations_contain_expected_statuses(

        if len(NEGATIVE_STATUSES - current_statuses) != len(NEGATIVE_STATUSES):
            raise AirflowException(
-                'An unexpected operation status was encountered. Expected: {}'.format(
-                    ", ".join(expected_statuses_set)
-                )
+                f"An unexpected operation status was encountered. "
+                f"Expected: {', '.join(expected_statuses_set)}"
            )
        return False
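
One detail worth noting in the replacement: on the Python versions in use here (before 3.12 and PEP 701), an f-string expression cannot reuse the quote character that delimits the f-string itself, which is why the join delimiter switches from double to single quotes. A tiny standalone illustration with a hypothetical status set:

# The inner ', ' must use different quotes than the enclosing f-string
# on Python < 3.12.
expected_statuses_set = {'SUCCESS', 'FAILED'}
print(f"Expected: {', '.join(sorted(expected_statuses_set))}")  # Expected: FAILED, SUCCESS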

airflow/providers/influxdb/hooks/influxdb.py

Lines changed: 3 additions & 5 deletions
@@ -68,11 +68,9 @@ def get_uri(self, conn: Connection):
        based on SSL or other InfluxDB host requirements

        """
-        return '{scheme}://{host}:{port}'.format(
-            scheme='https' if conn.schema is None else f'{conn.schema}',
-            host=conn.host,
-            port='7687' if conn.port is None else f'{conn.port}',
-        )
+        conn_scheme = 'https' if conn.schema is None else conn.schema
+        conn_port = 7687 if conn.port is None else conn.port
+        return f"{conn_scheme}://{conn.host}:{conn_port}"

    def get_conn(self) -> InfluxDBClient:
        """

0 commit comments
