Skip to content

Commit 4cc7bc6

Browse files
cbornet and eyurtsev authored
core: Add ruff rules PLR (#30696)
Add ruff rules [PLR](https://docs.astral.sh/ruff/rules/#refactor-plr) Except PLR09xxx and PLR2004. Co-authored-by: Eugene Yurtsev <[email protected]>
1 parent 68361f9 commit 4cc7bc6

File tree

19 files changed

+278
-310
lines changed

19 files changed

+278
-310
lines changed

libs/core/langchain_core/callbacks/file.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -53,11 +53,10 @@ def on_chain_start(
5353
"""
5454
if "name" in kwargs:
5555
name = kwargs["name"]
56+
elif serialized:
57+
name = serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
5658
else:
57-
if serialized:
58-
name = serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
59-
else:
60-
name = "<unknown>"
59+
name = "<unknown>"
6160
print_text(
6261
f"\n\n\033[1m> Entering new {name} chain...\033[0m",
6362
end="\n",

libs/core/langchain_core/callbacks/manager.py

+14-18
Original file line numberDiff line numberDiff line change
@@ -364,19 +364,16 @@ async def _ahandle_event_for_handler(
364364
event = getattr(handler, event_name)
365365
if asyncio.iscoroutinefunction(event):
366366
await event(*args, **kwargs)
367+
elif handler.run_inline:
368+
event(*args, **kwargs)
367369
else:
368-
if handler.run_inline:
369-
event(*args, **kwargs)
370-
else:
371-
await asyncio.get_event_loop().run_in_executor(
372-
None,
373-
cast(
374-
"Callable",
375-
functools.partial(
376-
copy_context().run, event, *args, **kwargs
377-
),
378-
),
379-
)
370+
await asyncio.get_event_loop().run_in_executor(
371+
None,
372+
cast(
373+
"Callable",
374+
functools.partial(copy_context().run, event, *args, **kwargs),
375+
),
376+
)
380377
except NotImplementedError as e:
381378
if event_name == "on_chat_model_start":
382379
message_strings = [get_buffer_string(m) for m in args[1]]
@@ -2426,12 +2423,11 @@ def _configure(
24262423
for handler in callback_manager.handlers
24272424
):
24282425
callback_manager.add_handler(var_handler, inheritable)
2429-
else:
2430-
if not any(
2431-
isinstance(handler, handler_class)
2432-
for handler in callback_manager.handlers
2433-
):
2434-
callback_manager.add_handler(var_handler, inheritable)
2426+
elif not any(
2427+
isinstance(handler, handler_class)
2428+
for handler in callback_manager.handlers
2429+
):
2430+
callback_manager.add_handler(var_handler, inheritable)
24352431
return callback_manager
24362432

24372433

libs/core/langchain_core/callbacks/stdout.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,10 @@ def on_chain_start(
3737
"""
3838
if "name" in kwargs:
3939
name = kwargs["name"]
40+
elif serialized:
41+
name = serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
4042
else:
41-
if serialized:
42-
name = serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
43-
else:
44-
name = "<unknown>"
43+
name = "<unknown>"
4544
print(f"\n\n\033[1m> Entering new {name} chain...\033[0m") # noqa: T201
4645

4746
@override

libs/core/langchain_core/indexing/api.py

+7-8
Original file line numberDiff line numberDiff line change
@@ -316,7 +316,7 @@ def index(
316316
)
317317
raise ValueError(msg)
318318

319-
if (cleanup == "incremental" or cleanup == "scoped_full") and source_id_key is None:
319+
if (cleanup in {"incremental", "scoped_full"}) and source_id_key is None:
320320
msg = (
321321
"Source id key is required when cleanup mode is incremental or scoped_full."
322322
)
@@ -379,7 +379,7 @@ def index(
379379
source_id_assigner(doc) for doc in hashed_docs
380380
]
381381

382-
if cleanup == "incremental" or cleanup == "scoped_full":
382+
if cleanup in {"incremental", "scoped_full"}:
383383
# source ids are required.
384384
for source_id, hashed_doc in zip(source_ids, hashed_docs):
385385
if source_id is None:
@@ -622,7 +622,7 @@ async def aindex(
622622
)
623623
raise ValueError(msg)
624624

625-
if (cleanup == "incremental" or cleanup == "scoped_full") and source_id_key is None:
625+
if (cleanup in {"incremental", "scoped_full"}) and source_id_key is None:
626626
msg = (
627627
"Source id key is required when cleanup mode is incremental or scoped_full."
628628
)
@@ -667,11 +667,10 @@ async def aindex(
667667
# In such a case, we use the load method and convert it to an async
668668
# iterator.
669669
async_doc_iterator = _to_async_iterator(docs_source.load())
670+
elif hasattr(docs_source, "__aiter__"):
671+
async_doc_iterator = docs_source # type: ignore[assignment]
670672
else:
671-
if hasattr(docs_source, "__aiter__"):
672-
async_doc_iterator = docs_source # type: ignore[assignment]
673-
else:
674-
async_doc_iterator = _to_async_iterator(docs_source)
673+
async_doc_iterator = _to_async_iterator(docs_source)
675674

676675
source_id_assigner = _get_source_id_assigner(source_id_key)
677676

@@ -694,7 +693,7 @@ async def aindex(
694693
source_id_assigner(doc) for doc in hashed_docs
695694
]
696695

697-
if cleanup == "incremental" or cleanup == "scoped_full":
696+
if cleanup in {"incremental", "scoped_full"}:
698697
# If the cleanup mode is incremental, source ids are required.
699698
for source_id, hashed_doc in zip(source_ids, hashed_docs):
700699
if source_id is None:

libs/core/langchain_core/language_models/chat_models.py

+10-12
Original file line numberDiff line numberDiff line change
@@ -955,13 +955,12 @@ def _generate_with_cache(
955955
)
956956
chunks.append(chunk)
957957
result = generate_from_stream(iter(chunks))
958+
elif inspect.signature(self._generate).parameters.get("run_manager"):
959+
result = self._generate(
960+
messages, stop=stop, run_manager=run_manager, **kwargs
961+
)
958962
else:
959-
if inspect.signature(self._generate).parameters.get("run_manager"):
960-
result = self._generate(
961-
messages, stop=stop, run_manager=run_manager, **kwargs
962-
)
963-
else:
964-
result = self._generate(messages, stop=stop, **kwargs)
963+
result = self._generate(messages, stop=stop, **kwargs)
965964

966965
# Add response metadata to each generation
967966
for idx, generation in enumerate(result.generations):
@@ -1028,13 +1027,12 @@ async def _agenerate_with_cache(
10281027
)
10291028
chunks.append(chunk)
10301029
result = generate_from_stream(iter(chunks))
1030+
elif inspect.signature(self._agenerate).parameters.get("run_manager"):
1031+
result = await self._agenerate(
1032+
messages, stop=stop, run_manager=run_manager, **kwargs
1033+
)
10311034
else:
1032-
if inspect.signature(self._agenerate).parameters.get("run_manager"):
1033-
result = await self._agenerate(
1034-
messages, stop=stop, run_manager=run_manager, **kwargs
1035-
)
1036-
else:
1037-
result = await self._agenerate(messages, stop=stop, **kwargs)
1035+
result = await self._agenerate(messages, stop=stop, **kwargs)
10381036

10391037
# Add response metadata to each generation
10401038
for idx, generation in enumerate(result.generations):

libs/core/langchain_core/messages/base.py

+9-10
Original file line numberDiff line numberDiff line change
@@ -170,17 +170,16 @@ def merge_content(
170170
# If both are lists
171171
merged = merge_lists(cast("list", merged), content) # type: ignore
172172
# If the first content is a list, and the second content is a string
173+
# If the last element of the first content is a string
174+
# Add the second content to the last element
175+
elif merged and isinstance(merged[-1], str):
176+
merged[-1] += content
177+
# If second content is an empty string, treat as a no-op
178+
elif content == "":
179+
pass
173180
else:
174-
# If the last element of the first content is a string
175-
# Add the second content to the last element
176-
if merged and isinstance(merged[-1], str):
177-
merged[-1] += content
178-
# If second content is an empty string, treat as a no-op
179-
elif content == "":
180-
pass
181-
else:
182-
# Otherwise, add the second content as a new element of the list
183-
merged.append(content)
181+
# Otherwise, add the second content as a new element of the list
182+
merged.append(content)
184183
return merged
185184

186185

0 commit comments

Comments (0)