
Commit c585ad5

Upgrade ruff to 0.0.262 (#30809)
Parent: 676a95b

30 files changed: +46 −46 lines

.pre-commit-config.yaml (1 addition, 1 deletion)

@@ -179,7 +179,7 @@ repos:
         # Since ruff makes use of multiple cores we _purposefully_ don't run this in docker so it can use the
         # host CPU to it's fullest
         entry: ruff --fix --no-update-check --force-exclude
-        additional_dependencies: ['ruff==0.0.226']
+        additional_dependencies: ['ruff==0.0.262']
         files: \.pyi?$
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py
   - repo: https://github.com/asottile/blacken-docs

airflow/kubernetes/pod.py (1 addition, 1 deletion)

@@ -29,7 +29,7 @@

 with warnings.catch_warnings():
     warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources  # noqa: autoflake
+    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources

 warnings.warn(
     "This module is deprecated. Please use `kubernetes.client.models` for `V1ResourceRequirements` and `Port`.",

airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py (1 addition, 1 deletion)

@@ -87,7 +87,7 @@ class DynamoDBToS3Operator(AwsToAwsBaseOperator):
     :param dynamodb_scan_kwargs: kwargs pass to <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>
     :param s3_key_prefix: Prefix of s3 object key
     :param process_func: How we transforms a dynamodb item to bytes. By default we dump the json
-    """  # noqa: E501
+    """

     template_fields: Sequence[str] = (
         *AwsToAwsBaseOperator.template_fields,

airflow/providers/amazon/aws/triggers/redshift_cluster.py (1 addition, 1 deletion)

@@ -55,7 +55,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = RedshiftAsyncHook(aws_conn_id=self.aws_conn_id)
         while self.attempts >= 1:
             self.attempts = self.attempts - 1
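This unquoting of `TriggerEvent` repeats across almost every trigger module below. The quotes were a forward-reference workaround; because these modules use `from __future__ import annotations` (PEP 563), annotations are never evaluated at runtime, so the string form is redundant. The autofix matches ruff's UP037 ("remove quotes from type annotation"), named here as an inference. A self-contained sketch of why the quotes can go:

# Self-contained sketch, assuming PEP 563 semantics via the __future__
# import. `Event` is a stand-in for TriggerEvent; the import under
# TYPE_CHECKING is hypothetical and never executes at runtime.
from __future__ import annotations

from typing import TYPE_CHECKING, AsyncIterator

if TYPE_CHECKING:
    from some_module import Event  # hypothetical, for type checkers only


async def run() -> AsyncIterator[Event]:  # no quotes needed
    # With PEP 563, this annotation stays an unevaluated string, so Event
    # need not be importable when the function is defined.
    yield  # a real trigger would yield TriggerEvent instances here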

airflow/providers/apache/kafka/hooks/base.py (1 addition, 1 deletion)

@@ -58,7 +58,7 @@ def _get_client(self, config):

     @cached_property
     def get_conn(self) -> Any:
-        """get the configuration object"""
+        """Get the configuration object"""
         config = self.get_connection(self.kafka_config_id).extra_dejson

         if not (config.get("bootstrap.servers", None)):

airflow/providers/apache/kafka/hooks/client.py (1 addition, 1 deletion)

@@ -41,7 +41,7 @@ def create_topic(
         self,
         topics: Sequence[Sequence[Any]],
     ) -> None:
-        """creates a topic
+        """Creates a topic

         :param topics: a list of topics to create including the number of partitions for the topic
             and the replication factor. Format: [ ("topic_name", number of partitions, replication factor)]
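Both Kafka docstring edits follow pydocstyle's capitalization convention, which ruff exposes as D403 ("first word of the first line should be properly capitalized"); the rule attribution is an inference from the diff. In miniature:

# Miniature before/after, assuming pydocstyle's D403 is what fired here.
def get_conn_before():
    """get the configuration object"""  # flagged: first word lowercase


def get_conn_after():
    """Get the configuration object"""  # accepted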

airflow/providers/apache/livy/triggers/livy.py (1 addition, 1 deletion)

@@ -78,7 +78,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Checks if the _polling_interval > 0, in that case it pools Livy for
         batch termination asynchronously.

airflow/providers/cncf/kubernetes/triggers/pod.py (1 addition, 1 deletion)

@@ -116,7 +116,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pod status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         self.log.info("Checking pod %r in namespace %r.", self.pod_name, self.pod_namespace)

airflow/providers/dbt/cloud/operators/dbt.py (1 addition, 1 deletion)

@@ -173,7 +173,7 @@ def execute(self, context: Context):
         )
         return self.run_id

-    def execute_complete(self, context: "Context", event: dict[str, Any]) -> int:
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
         """
         Callback for when the trigger fires - returns immediately.
         Relies on trigger to throw an exception, otherwise it assumes execution was

airflow/providers/dbt/cloud/triggers/dbt.py (1 addition, 1 deletion)

@@ -64,7 +64,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Dbt, polls for the pipeline run status"""
         hook = DbtCloudHook(self.conn_id)
         try:

airflow/providers/google/cloud/hooks/bigtable.py (1 addition, 1 deletion)

@@ -248,7 +248,7 @@ def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None
         """
         instance = self.get_instance(instance_id=instance_id, project_id=project_id)
         if instance is None:
-            raise RuntimeError("Instance %s did not exist; unable to delete table %s" % instance_id, table_id)
+            raise RuntimeError(f"Instance {instance_id} did not exist; unable to delete table {table_id}")
         table = instance.table(table_id=table_id)
         table.delete()
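This hunk is a genuine bug fix, not just style. Since `%` binds tighter than the argument comma, the old line parsed as `RuntimeError(("...%s...%s" % instance_id), table_id)`: one value feeding two `%s` placeholders, so Python raised `TypeError: not enough arguments for format string` before the intended `RuntimeError` was ever built. A standalone repro:

# Standalone repro of the precedence bug the f-string fix removes.
instance_id, table_id = "my-instance", "my-table"

try:
    # Parses as RuntimeError(("..." % instance_id), table_id): two %s
    # placeholders, only one argument.
    raise RuntimeError("Instance %s did not exist; unable to delete table %s" % instance_id, table_id)
except TypeError as err:
    print(err)  # not enough arguments for format string

# The replacement interpolates both values unambiguously:
print(f"Instance {instance_id} did not exist; unable to delete table {table_id}")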

airflow/providers/google/cloud/triggers/bigquery.py (7 additions, 7 deletions)

@@ -71,7 +71,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -122,7 +122,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -181,7 +181,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent with response data"""
         hook = self._get_async_hook()
         while True:
@@ -286,7 +286,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -414,7 +414,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:
@@ -487,7 +487,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
     def _get_async_hook(self) -> BigQueryTableAsyncHook:
         return BigQueryTableAsyncHook(gcp_conn_id=self.gcp_conn_id)

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         while True:
             try:
@@ -562,7 +562,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Will run until the table exists in the Google Big Query."""
         hook = BigQueryAsyncHook(gcp_conn_id=self.gcp_conn_id)
         job_id = None

airflow/providers/google/cloud/triggers/cloud_build.py (1 addition, 1 deletion)

@@ -75,7 +75,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current build execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:

airflow/providers/google/cloud/triggers/datafusion.py (1 addition, 1 deletion)

@@ -80,7 +80,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current pipeline status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:

airflow/providers/google/cloud/triggers/dataproc.py (3 additions, 3 deletions)

@@ -143,7 +143,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         while True:
             cluster = await self.get_async_hook().get_cluster(
                 project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
@@ -261,7 +261,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Wait until cluster is deleted completely"""
         while self.end_time > time.time():
             try:
@@ -309,7 +309,7 @@ def serialize(self):
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         hook = self.get_async_hook()
         while True:
             try:

airflow/providers/google/cloud/triggers/gcs.py (3 additions, 3 deletions)

@@ -66,8 +66,8 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
-        """loop until the relevant file/folder is found."""
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Loop until the relevant file/folder is found."""
         try:
             hook = self._get_async_hook()
             while True:
@@ -144,7 +144,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Loop until the object updated time is greater than target datetime"""
         try:
             hook = self._get_async_hook()

airflow/providers/google/cloud/triggers/kubernetes_engine.py (1 addition, 1 deletion)

@@ -157,7 +157,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets operation status and yields corresponding event."""
         hook = self._get_hook()
         while True:

airflow/providers/google/cloud/triggers/mlengine.py (1 addition, 1 deletion)

@@ -88,7 +88,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:  # type: ignore[override]
+    async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Gets current job execution status and yields a TriggerEvent"""
         hook = self._get_async_hook()
         while True:

airflow/providers/http/hooks/http.py (1 addition, 1 deletion)

@@ -306,7 +306,7 @@ async def run(
         data: dict[str, Any] | str | None = None,
         headers: dict[str, Any] | None = None,
         extra_options: dict[str, Any] | None = None,
-    ) -> "ClientResponse":
+    ) -> ClientResponse:
         r"""
         Performs an asynchronous HTTP request call

airflow/providers/microsoft/azure/triggers/data_factory.py (2 additions, 2 deletions)

@@ -67,7 +67,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline run status"""
         hook = AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:
@@ -140,7 +140,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Azure Data Factory, polls for the pipeline run status"""
         hook = AzureDataFactoryAsyncHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
         try:

airflow/providers/microsoft/azure/triggers/wasb.py (2 additions, 2 deletions)

@@ -63,7 +63,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of the given blob name."""
         blob_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)
@@ -138,7 +138,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Makes async connection to Azure WASB and polls for existence of a blob with given prefix."""
         prefix_exists = False
         hook = WasbAsyncHook(wasb_conn_id=self.wasb_conn_id, public_read=self.public_read)

airflow/stats.py (1 addition, 1 deletion)

@@ -301,7 +301,7 @@ def wrapper(
                 if stat is not None and tags is not None:
                     for k, v in tags.items():
                         if self.metric_tags_validator.test(k):
-                            if all((c not in [",", "="] for c in v + k)):
+                            if all(c not in [",", "="] for c in v + k):
                                 stat += f",{k}={v}"
                             else:
                                 log.error("Dropping invalid tag: %s=%s.", k, v)
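Here ruff drops a layer of parentheses: when a generator expression is a call's only argument, the call's own parentheses already delimit it. This looks like pyupgrade's extraneous-parentheses fix (UP034 in ruff; an inference, the commit doesn't name rules) and is behavior-preserving:

# Behavior-preserving simplification, shown in isolation.
tag_key, tag_value = "env", "prod"

before = all((c not in [",", "="] for c in tag_value + tag_key))  # redundant parens
after = all(c not in [",", "="] for c in tag_value + tag_key)     # same result

assert before is after is True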

airflow/triggers/base.py (1 addition, 1 deletion)

@@ -60,7 +60,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
         raise NotImplementedError("Triggers must implement serialize()")

     @abc.abstractmethod
-    async def run(self) -> AsyncIterator["TriggerEvent"]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """
         Runs the trigger in an asynchronous context.

airflow/triggers/external_task.py (2 additions, 2 deletions)

@@ -72,7 +72,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the task exists, and has
         hit one of the states yet, or not.
@@ -136,7 +136,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """
         Checks periodically in the database to see if the dag run exists, and has
         hit one of the states yet, or not.

airflow/triggers/file.py (1 addition, 1 deletion)

@@ -58,7 +58,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
             },
         )

-    async def run(self) -> typing.AsyncIterator["TriggerEvent"]:
+    async def run(self) -> typing.AsyncIterator[TriggerEvent]:
         """Loop until the relevant files are found."""
         while True:
             for path in glob(self.filepath, recursive=self.recursive):

docs/exts/exampleinclude.py (1 addition, 1 deletion)

@@ -36,7 +36,7 @@
 from sphinx.util.nodes import set_source_info

 try:
-    import sphinx_airflow_theme  # noqa: autoflake
+    import sphinx_airflow_theme

     airflow_theme_is_available = True
 except ImportError:

scripts/ci/pre_commit/pre_commit_insert_extras.py (2 additions, 2 deletions)

@@ -27,8 +27,8 @@
 sys.path.insert(0, str(AIRFLOW_SOURCES_DIR))  # make sure setup is imported from Airflow
 # flake8: noqa: F401

-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
-from setup import EXTRAS_DEPENDENCIES  # isort:skip # noqa
+from common_precommit_utils import insert_documentation  # isort: skip
+from setup import EXTRAS_DEPENDENCIES  # isort:skip

 sys.path.append(str(AIRFLOW_SOURCES_DIR))

scripts/ci/pre_commit/pre_commit_local_yml_mounts.py (3 additions, 3 deletions)

@@ -22,16 +22,16 @@

 sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_precommit_utils is imported

-from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip # noqa E402
+from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH  # isort: skip

 sys.path.insert(0, str(AIRFLOW_SOURCES_ROOT_PATH))  # make sure setup is imported from Airflow
 sys.path.insert(
     0, str(AIRFLOW_SOURCES_ROOT_PATH / "dev" / "breeze" / "src")
 )  # make sure setup is imported from Airflow
 # flake8: noqa: F401
-from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip # noqa E402
+from airflow_breeze.utils.docker_command_utils import VOLUMES_FOR_SELECTED_MOUNTS  # isort: skip

-from common_precommit_utils import insert_documentation  # isort: skip # noqa E402
+from common_precommit_utils import insert_documentation  # isort: skip

 sys.path.append(str(AIRFLOW_SOURCES_ROOT_PATH))

tests/cli/conftest.py (1 addition, 1 deletion)

@@ -27,7 +27,7 @@
 from tests.test_utils.config import conf_vars

 # Create custom executors here because conftest is imported first
-custom_executor_module = type(sys)("custom_executor")  # noqa
+custom_executor_module = type(sys)("custom_executor")
 custom_executor_module.CustomCeleryExecutor = type(  # type: ignore
     "CustomCeleryExecutor", (celery_executor.CeleryExecutor,), {}
 )

tests/system/providers/google/cloud/bigtable/example_bigtable.py (1 addition, 1 deletion)

@@ -40,7 +40,7 @@
 See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster
 * CBT_TABLE_ID - desired ID of the Table
 * CBT_POKE_INTERVAL - number of seconds between every attempt of Sensor check
-"""  # noqa: E501
+"""
 from __future__ import annotations

 import os
