Commit 56f52d5

fix: typos in api
Signed-off-by: yihong0618 <[email protected]>
1 parent 284bb7a commit 56f52d5

7 files changed: 16 additions & 16 deletions

api/commands.py

Lines changed: 4 additions & 4 deletions
@@ -259,15 +259,15 @@ def migrate_knowledge_vector_database():
     skipped_count = 0
     total_count = 0
     vector_type = dify_config.VECTOR_STORE
-    upper_colletion_vector_types = {
+    upper_collection_vector_types = {
         VectorType.MILVUS,
         VectorType.PGVECTOR,
         VectorType.RELYT,
         VectorType.WEAVIATE,
         VectorType.ORACLE,
         VectorType.ELASTICSEARCH,
     }
-    lower_colletion_vector_types = {
+    lower_collection_vector_types = {
         VectorType.ANALYTICDB,
         VectorType.CHROMA,
         VectorType.MYSCALE,
@@ -307,7 +307,7 @@ def migrate_knowledge_vector_database():
                         continue
                 collection_name = ""
                 dataset_id = dataset.id
-                if vector_type in upper_colletion_vector_types:
+                if vector_type in upper_collection_vector_types:
                     collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                 elif vector_type == VectorType.QDRANT:
                     if dataset.collection_binding_id:
@@ -323,7 +323,7 @@ def migrate_knowledge_vector_database():
                     else:
                         collection_name = Dataset.gen_collection_name_by_id(dataset_id)

-                elif vector_type in lower_colletion_vector_types:
+                elif vector_type in lower_collection_vector_types:
                     collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
                 else:
                     raise ValueError(f"Vector store {vector_type} is not supported.")
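
For context, the rename is purely cosmetic and the migration behavior is unchanged: the two sets exist because some vector stores keep the generated collection name as-is while others need it lower-cased. A standalone sketch of that split (the `gen_collection_name_by_id` helper and the set members below are illustrative stand-ins, not Dify code):

```python
# Illustrative sketch, not Dify code: shows how the two renamed sets decide
# whether the generated collection name keeps its casing or is lower-cased.
def gen_collection_name_by_id(dataset_id: str) -> str:
    # Hypothetical stand-in for Dataset.gen_collection_name_by_id.
    return f"Vector_index_{dataset_id.replace('-', '_')}_Node"


UPPER_COLLECTION_VECTOR_TYPES = {"milvus", "pgvector", "weaviate"}   # illustrative members
LOWER_COLLECTION_VECTOR_TYPES = {"chroma", "analyticdb", "myscale"}  # illustrative members


def collection_name_for(vector_type: str, dataset_id: str) -> str:
    if vector_type in UPPER_COLLECTION_VECTOR_TYPES:
        return gen_collection_name_by_id(dataset_id)
    if vector_type in LOWER_COLLECTION_VECTOR_TYPES:
        return gen_collection_name_by_id(dataset_id).lower()
    raise ValueError(f"Vector store {vector_type} is not supported.")


print(collection_name_for("chroma", "11111111-2222-3333-4444-555555555555"))
# vector_index_11111111_2222_3333_4444_555555555555_node
```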

api/core/app/apps/README.md

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 
 Due to the presence of tasks in App Runner that require long execution times, such as LLM generation and external requests, Flask-Sqlalchemy's strategy for database connection pooling is to allocate one connection (transaction) per request. This approach keeps a connection occupied even during non-DB tasks, leading to the inability to acquire new connections during high concurrency requests due to multiple long-running tasks.
 
-Therefore, the database operations in App Runner and Task Pipeline must ensure connections are closed immediately after use, and it's better to pass IDs rather than Model objects to avoid deattach errors.
+Therefore, the database operations in App Runner and Task Pipeline must ensure connections are closed immediately after use, and it's better to pass IDs rather than Model objects to avoid detach errors.
 
 Examples:
 
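
The corrected sentence is the substance of this README: hold no connection across long-running work, and hand IDs across the boundary so nothing is read from a detached model. A minimal sketch of that pattern in plain SQLAlchemy 2.0 (the `App` model, engine URL, and `run_long_task` helper are hypothetical stand-ins, not Dify's code):

```python
# Minimal sketch of "pass IDs, close connections quickly", assuming plain
# SQLAlchemy 2.0; model and helper names here are hypothetical.
from sqlalchemy import create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class App(Base):  # hypothetical stand-in for the real App model
    __tablename__ = "apps"
    id: Mapped[str] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column()


engine = create_engine("sqlite:///:memory:")  # stand-in for the real database


def run_long_task(app_id: str) -> str:
    # Re-fetch by ID in a short-lived session instead of carrying a Model
    # object around (it would be detached once its session is gone).
    with Session(engine) as session:
        app = session.scalars(select(App).where(App.id == app_id)).one()
        app_name = app.name  # copy plain values out before the session closes
    # ... long-running work (LLM generation, external requests) happens here
    # with no database connection held ...
    return app_name
```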

api/core/model_runtime/docs/zh_Hans/interfaces.md

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ class XinferenceProvider(Provider):
         """
 ```
 
-也可以直接抛出对应Erros,并做如下定义,这样在之后的调用中可以直接抛出`InvokeConnectionError`等异常。
+也可以直接抛出对应 Errors,并做如下定义,这样在之后的调用中可以直接抛出`InvokeConnectionError`等异常。
 
 ```python
     @property
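
The corrected sentence says, roughly: you can also raise the corresponding Errors directly by defining the mapping below, after which later code can simply raise `InvokeConnectionError` and similar exceptions. A hedged sketch of such a mapping (the import path follows the Dify model runtime layout, and the concrete exceptions mapped on the right-hand side are assumptions for illustration):

```python
# Hedged sketch: the import path and the exceptions mapped on the right-hand
# side are assumptions, not copied from the documentation.
from core.model_runtime.errors.invoke import (
    InvokeConnectionError,
    InvokeError,
    InvokeServerUnavailableError,
)


class ExampleLargeLanguageModel:  # illustrative class name
    @property
    def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
        # Map the runtime's unified invoke errors to the exceptions the
        # underlying SDK may raise, so callers can work with the unified ones.
        return {
            InvokeConnectionError: [ConnectionError, TimeoutError],
            InvokeServerUnavailableError: [RuntimeError],
        }
```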

api/core/tools/provider/builtin/wolframalpha/tools/wolframalpha.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def _invoke(
                 params["input"] = query
             else:
                 finished = True
-                if "souces" in response_data["queryresult"]:
+                if "sources" in response_data["queryresult"]:
                     return self.create_link_message(response_data["queryresult"]["sources"]["url"])
                 elif "pods" in response_data["queryresult"]:
                     result = response_data["queryresult"]["pods"][0]["subpods"][0]["plaintext"]
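
Worth noting why this one-character fix matters: with the misspelled key, the `sources` branch could never match, so every response fell through to the `pods` lookup. A standalone illustration with a mock response (not a real WolframAlpha payload):

```python
# Mock response for illustration only; not a real WolframAlpha payload.
response_data = {
    "queryresult": {
        "sources": {"url": "https://example.com/source"},
        "pods": [{"subpods": [{"plaintext": "42"}]}],
    }
}

if "sources" in response_data["queryresult"]:  # the fixed key now actually matches
    print("link:", response_data["queryresult"]["sources"]["url"])
elif "pods" in response_data["queryresult"]:
    print("text:", response_data["queryresult"]["pods"][0]["subpods"][0]["plaintext"])
```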

api/core/workflow/nodes/if_else/if_else_node.py

Lines changed: 5 additions & 5 deletions
@@ -24,7 +24,7 @@ def _run(self) -> NodeRunResult:
         """
         node_inputs: dict[str, list] = {"conditions": []}
 
-        process_datas: dict[str, list] = {"condition_results": []}
+        process_data: dict[str, list] = {"condition_results": []}
 
         input_conditions = []
         final_result = False
@@ -40,7 +40,7 @@ def _run(self) -> NodeRunResult:
                         operator=case.logical_operator,
                     )
 
-                    process_datas["condition_results"].append(
+                    process_data["condition_results"].append(
                         {
                             "group": case.model_dump(),
                             "results": group_result,
@@ -65,23 +65,23 @@ def _run(self) -> NodeRunResult:
 
                 selected_case_id = "true" if final_result else "false"
 
-                process_datas["condition_results"].append(
+                process_data["condition_results"].append(
                     {"group": "default", "results": group_result, "final_result": final_result}
                 )
 
             node_inputs["conditions"] = input_conditions
 
         except Exception as e:
             return NodeRunResult(
-                status=WorkflowNodeExecutionStatus.FAILED, inputs=node_inputs, process_data=process_datas, error=str(e)
+                status=WorkflowNodeExecutionStatus.FAILED, inputs=node_inputs, process_data=process_data, error=str(e)
             )
 
         outputs = {"result": final_result, "selected_case_id": selected_case_id}
 
         data = NodeRunResult(
             status=WorkflowNodeExecutionStatus.SUCCEEDED,
             inputs=node_inputs,
-            process_data=process_datas,
+            process_data=process_data,
             edge_source_handle=selected_case_id or "false",  # Use case ID or 'default'
             outputs=outputs,
         )

api/core/workflow/nodes/variable_assigner/v2/exc.py

Lines changed: 2 additions & 2 deletions
@@ -7,8 +7,8 @@
 
 
 class OperationNotSupportedError(VariableOperatorNodeError):
-    def __init__(self, *, operation: Operation, varialbe_type: str):
-        super().__init__(f"Operation {operation} is not supported for type {varialbe_type}")
+    def __init__(self, *, operation: Operation, variable_type: str):
+        super().__init__(f"Operation {operation} is not supported for type {variable_type}")
 
 
 class InputTypeNotSupportedError(VariableOperatorNodeError):

api/core/workflow/nodes/variable_assigner/v2/node.py

Lines changed: 2 additions & 2 deletions
@@ -45,7 +45,7 @@ def _run(self) -> NodeRunResult:
 
             # Check if operation is supported
             if not helpers.is_operation_supported(variable_type=variable.value_type, operation=item.operation):
-                raise OperationNotSupportedError(operation=item.operation, varialbe_type=variable.value_type)
+                raise OperationNotSupportedError(operation=item.operation, variable_type=variable.value_type)
 
             # Check if variable input is supported
             if item.input_type == InputType.VARIABLE and not helpers.is_variable_input_supported(
@@ -156,4 +156,4 @@ def _handle_item(
             case Operation.DIVIDE:
                 return variable.value / value
             case _:
-                raise OperationNotSupportedError(operation=operation, varialbe_type=variable.value_type)
+                raise OperationNotSupportedError(operation=operation, variable_type=variable.value_type)
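
Because the constructor takes keyword-only arguments (note the bare `*` in the `exc.py` hunk above), the misspelled name was part of the call signature, which is why the exception class and both call sites had to change together. A standalone sketch of the corrected shape (the real `Operation` type is an enum; it is simplified to a plain string here):

```python
# Standalone sketch; the real Operation type is an enum, simplified to str here.
class VariableOperatorNodeError(Exception):
    pass


class OperationNotSupportedError(VariableOperatorNodeError):
    def __init__(self, *, operation: str, variable_type: str):
        super().__init__(f"Operation {operation} is not supported for type {variable_type}")


try:
    raise OperationNotSupportedError(operation="divide", variable_type="string")
except OperationNotSupportedError as e:
    print(e)  # Operation divide is not supported for type string
```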
