Skip to content

Commit 8f9223b

Browse files
chore(internal): ban usage of lru_cache (#1331)
1 parent a853ee2 commit 8f9223b

File tree

4 files changed

+11
-5
lines changed

4 files changed

+11
-5
lines changed

pyproject.toml

+6-1
Original file line numberDiff line numberDiff line change
@@ -167,7 +167,9 @@ select = [
167167
"T201",
168168
"T203",
169169
# misuse of typing.TYPE_CHECKING
170-
"TCH004"
170+
"TCH004",
171+
# import rules
172+
"TID251",
171173
]
172174
ignore = [
173175
# mutable defaults
@@ -183,6 +185,9 @@ ignore-init-module-imports = true
183185
[tool.ruff.format]
184186
docstring-code-format = true
185187

188+
[tool.ruff.lint.flake8-tidy-imports.banned-api]
189+
"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; the `lru_cache` function from `_utils` should be used instead"
190+
186191
[tool.ruff.lint.isort]
187192
length-sort = true
188193
length-sort-straight = true

src/openai/_base_client.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
cast,
3030
overload,
3131
)
32-
from functools import lru_cache
3332
from typing_extensions import Literal, override, get_origin
3433

3534
import anyio
@@ -61,7 +60,7 @@
6160
RequestOptions,
6261
ModelBuilderProtocol,
6362
)
64-
from ._utils import is_dict, is_list, is_given, is_mapping
63+
from ._utils import is_dict, is_list, is_given, lru_cache, is_mapping
6564
from ._compat import model_copy, model_dump
6665
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
6766
from ._response import (

src/openai/_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import inspect
55
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
66
from datetime import date, datetime
7-
from functools import lru_cache
87
from typing_extensions import (
98
Unpack,
109
Literal,
@@ -37,6 +36,7 @@
3736
PropertyInfo,
3837
is_list,
3938
is_given,
39+
lru_cache,
4040
is_mapping,
4141
parse_date,
4242
coerce_boolean,

src/openai/_utils/_utils.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -395,5 +395,7 @@ def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
395395
"""A version of functools.lru_cache that retains the type signature
396396
for the wrapped function arguments.
397397
"""
398-
wrapper = functools.lru_cache(maxsize=maxsize)
398+
wrapper = functools.lru_cache( # noqa: TID251
399+
maxsize=maxsize,
400+
)
399401
return cast(Any, wrapper) # type: ignore[no-any-return]

0 commit comments

Comments
 (0)