|
# Copyright (c) Microsoft. All rights reserved.
import asyncio
from typing import Annotated

from semantic_kernel.agents import AzureResponsesAgent
from semantic_kernel.contents import AuthorRole, FunctionCallContent, FunctionResultContent
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.functions import kernel_function

| 10 | +""" |
| 11 | +The following sample demonstrates how to create an OpenAI |
| 12 | +Responses Agent using either Azure OpenAI or OpenAI. The |
| 13 | +Responses Agent allow for function calling, the use of file search and a |
| 14 | +web search tool. Responses Agent Threads are used to manage the |
| 15 | +conversation state, similar to a Semantic Kernel Chat History. |
| 16 | +Additionally, the invoke configures a message callback |
| 17 | +to receive the conversation messages during invocation. |
| 18 | +""" |


# Define a sample plugin for the sample
class MenuPlugin:
    """A sample Menu Plugin used for the concept sample."""

    @kernel_function(description="Provides a list of specials from the menu.")
    def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]:
        return """
        Special Soup: Clam Chowder
        Special Salad: Cobb Salad
        Special Drink: Chai Tea
        """

    @kernel_function(description="Provides the price of the requested menu item.")
    def get_item_price(
        self, menu_item: Annotated[str, "The name of the menu item."]
    ) -> Annotated[str, "Returns the price of the menu item."]:
        return "$9.99"


intermediate_steps: list[ChatMessageContent] = []


# Callback that receives intermediate messages (for example, function call and
# function result content) produced while the agent is generating a response.
async def handle_intermediate_steps(message: ChatMessageContent) -> None:
    intermediate_steps.append(message)


async def main():
    # 1. Create the client using Azure OpenAI resources and configuration
    client, model = AzureResponsesAgent.setup_resources()

    # 2. Create a Semantic Kernel agent for the OpenAI Responses API
    agent = AzureResponsesAgent(
        ai_model_id=model,
        client=client,
        name="Host",
        instructions="Answer questions about the menu.",
        plugins=[MenuPlugin()],
    )

    # 3. Create a thread for the agent
    # If no thread is provided, a new thread will be
    # created and returned with the initial response
    thread = None
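    # Alternatively, a thread could be created up front instead of letting the first
    # invocation create one. This is only a sketch and assumes the ResponsesAgentThread
    # class exported by semantic_kernel.agents accepts the client directly:
    #
    #     from semantic_kernel.agents import ResponsesAgentThread
    #     thread = ResponsesAgentThread(client=client)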

    user_inputs = ["Hello", "What is the special soup?", "What is the special drink?", "How much is that?", "Thank you"]

    try:
        for user_input in user_inputs:
            print(f"# {AuthorRole.USER}: '{user_input}'")
            async for response in agent.invoke(
                messages=user_input,
                thread=thread,
                on_intermediate_message=handle_intermediate_steps,
            ):
                thread = response.thread
                print(f"# {response.name}: {response.content}")
    finally:
        # Clean up the thread if one was created during invocation
        if thread:
            await thread.delete()

    # Print the intermediate steps captured by the message callback
    print("\nIntermediate Steps:")
    for msg in intermediate_steps:
        if any(isinstance(item, FunctionResultContent) for item in msg.items):
            for fr in msg.items:
                if isinstance(fr, FunctionResultContent):
                    print(f"Function Result:> {fr.result} for function: {fr.name}")
        elif any(isinstance(item, FunctionCallContent) for item in msg.items):
            for fcc in msg.items:
                if isinstance(fcc, FunctionCallContent):
                    print(f"Function Call:> {fcc.name} with arguments: {fcc.arguments}")
        else:
            print(f"{msg.role}: {msg.content}")

    """
    Sample Output:

    # AuthorRole.USER: 'Hello'
    # Host: Hi there! How can I assist you with the menu today?
    # AuthorRole.USER: 'What is the special soup?'
    # Host: The special soup is Clam Chowder.
    # AuthorRole.USER: 'What is the special drink?'
    # Host: The special drink is Chai Tea.
    # AuthorRole.USER: 'How much is that?'
    # Host: Could you please specify the menu item you are asking about?
    # AuthorRole.USER: 'Thank you'
    # Host: You're welcome! If you have any questions about the menu or need assistance, feel free to ask.

    Intermediate Steps:
    AuthorRole.ASSISTANT: Hi there! How can I assist you with the menu today?
    AuthorRole.ASSISTANT:
    Function Result:>
    Special Soup: Clam Chowder
    Special Salad: Cobb Salad
    Special Drink: Chai Tea
     for function: MenuPlugin-get_specials
    AuthorRole.ASSISTANT: The special soup is Clam Chowder.
    AuthorRole.ASSISTANT:
    Function Result:>
    Special Soup: Clam Chowder
    Special Salad: Cobb Salad
    Special Drink: Chai Tea
     for function: MenuPlugin-get_specials
    AuthorRole.ASSISTANT: The special drink is Chai Tea.
    AuthorRole.ASSISTANT: Could you please specify the menu item you are asking about?
    AuthorRole.ASSISTANT: You're welcome! If you have any questions about the menu or need assistance, feel free to ask.
    """


if __name__ == "__main__":
    asyncio.run(main())