|
532 | 532 | "ingest_source": "Python",
|
533 | 533 | "is_response": True,
|
534 | 534 | "virtual_llm": True,
|
535 |
| - "content": "`", |
| 535 | + "content": "```html\n<!DOCTYPE html>\n<html>\n<head>\n <title>Math Quiz</title>\n</head>\n<body>\n <h2>Math Quiz Questions</h2>\n <ol>\n <li>What is the result of 5 + 3?</li>\n <ul>\n <li>A) 7</li>\n <li>B) 8</li>\n <li>C) 9</li>\n <li>D) 10</li>\n </ul>\n <li>What is the product of 6 x 7?</li>\n <ul>\n <li>A) 36</li>\n <li>B) 42</li>\n <li>C) 48</li>\n <li>D) 56</li>\n </ul>\n <li>What is the square root of 64?</li>\n <ul>\n <li>A) 6</li>\n <li>B) 7</li>\n <li>C) 8</li>\n <li>D) 9</li>\n </ul>\n <li>What is the result of 12 / 4?</li>\n <ul>\n <li>A) 2</li>\n <li>B) 3</li>\n <li>C) 4</li>\n <li>D) 5</li>\n </ul>\n <li>What is the sum of 15 + 9?</li>\n <ul>\n <li>A) 22</li>\n <li>B) 23</li>\n <li>C) 24</li>\n <li>D) 25</li>\n </ul>\n </ol>\n</body>\n</html>\n```", |
536 | 536 | },
|
537 | 537 | ],
|
538 | 538 | [
|
|
553 | 553 | ],
|
554 | 554 | ]
|
555 | 555 |
|
# Expected LLM custom events for a chain whose final step is a StrOutputParser
# (i.e. the chain returns a plain string). Each entry pairs the event intrinsics
# ({"type": ...}) with the expected user attributes. Order: one completion
# summary, then one message event per prompt/response message.
chat_completion_recorded_events_str_response = [
    # Summary event: one per chain invocation, covering both messages below.
    (
        {"type": "LlmChatCompletionSummary"},
        {
            "id": None,
            "llm.conversation_id": "my-awesome-id",
            "llm.foo": "bar",
            "span_id": None,
            "trace_id": "trace-id",
            "vendor": "langchain",
            "ingest_source": "Python",
            "virtual_llm": True,
            "request_id": None,
            "duration": None,
            "response.number_of_messages": 2,
            "metadata.id": "123",
        },
    ),
    # Input message: the raw invoke() payload, stringified by the agent.
    (
        {"type": "LlmChatCompletionMessage"},
        {
            "id": None,
            "llm.conversation_id": "my-awesome-id",
            "llm.foo": "bar",
            "request_id": None,
            "span_id": None,
            "trace_id": "trace-id",
            "content": "{'text': 'M'}",
            "completion_id": None,
            "sequence": 0,
            "vendor": "langchain",
            "ingest_source": "Python",
            "virtual_llm": True,
        },
    ),
    # Output message: the parsed string response; flagged with is_response.
    (
        {"type": "LlmChatCompletionMessage"},
        {
            "id": None,
            "llm.conversation_id": "my-awesome-id",
            "llm.foo": "bar",
            "request_id": None,
            "span_id": None,
            "trace_id": "trace-id",
            "content": "Milo",
            "completion_id": None,
            "sequence": 1,
            "vendor": "langchain",
            "ingest_source": "Python",
            "is_response": True,
            "virtual_llm": True,
        },
    ),
]
556 | 610 | chat_completion_recorded_events_list_response = [
|
557 | 611 | (
|
558 | 612 | {"type": "LlmChatCompletionSummary"},
|
|
682 | 736 | ]
|
683 | 737 |
|
684 | 738 |
|
@reset_core_stats_engine()
@validate_custom_events(events_with_context_attrs(chat_completion_recorded_events_str_response))
@validate_custom_event_count(count=7)
@validate_transaction_metrics(
    name="test_chain:test_langchain_chain_str_response",
    scoped_metrics=[("Llm/chain/LangChain/invoke", 1)],
    rollup_metrics=[("Llm/chain/LangChain/invoke", 1)],
    custom_metrics=[(f"Supportability/Python/ML/LangChain/{langchain.__version__}", 1)],
    background_task=True,
)
@background_task()
def test_langchain_chain_str_response(set_trace_info, chat_openai_client):
    """A chain ending in StrOutputParser records LLM events with string content."""
    set_trace_info()
    add_custom_attribute("llm.conversation_id", "my-awesome-id")
    add_custom_attribute("llm.foo", "bar")
    # Non-"llm."-prefixed attributes should NOT appear on the recorded events.
    add_custom_attribute("non_llm_attr", "python-agent")

    system_prompt = """You are a helpful assistant who generates a random first name. A user will pass in a first letter, and you should generate a name that starts with that first letter."""
    user_prompt = "{text}"

    prompt = langchain_core.prompts.ChatPromptTemplate.from_messages(
        [("system", system_prompt), ("human", user_prompt)]
    )
    parser = langchain_core.output_parsers.string.StrOutputParser()
    runnable = prompt | chat_openai_client | parser
    # Context attrs supplied here are expected on every recorded event.
    with WithLlmCustomAttributes({"context": "attr"}):
        runnable.invoke({"text": "M"}, config={"metadata": {"id": "123"}})
| 766 | + |
| 767 | + |
685 | 768 | @reset_core_stats_engine()
|
686 | 769 | @validate_custom_events(events_with_context_attrs(chat_completion_recorded_events_list_response))
|
687 | 770 | @validate_custom_event_count(count=7)
|
|
0 commit comments