Chat Client API Reference
Chat client classes for interacting with language models.
SupportsChatGetResponse
Protocol defining the interface that all chat clients must implement.
from agent_framework import SupportsChatGetResponse
Protocol Properties
additional_properties — Dictionary for storing additional client-specific properties.
get_response()
Send input and return the response.
response = await client.get_response(
messages=[Message(role="user", text="Hello")],
stream=False,
options={"temperature": 0.7}
)
messages
Sequence[Message]
required
The sequence of input messages to send.
Whether to stream the response.
Chat options as a TypedDict including model_id, temperature, max_tokens, etc.
return
Awaitable[ChatResponse] | ResponseStream[ChatResponseUpdate, ChatResponse]
When stream=False: An awaitable ChatResponse from the client. When stream=True: A ResponseStream yielding partial updates.
Example: Custom Chat Client
from agent_framework import SupportsChatGetResponse, ChatResponse, Message
from agent_framework import ChatResponseUpdate, ResponseStream
class CustomChatClient:
    """Minimal client satisfying the SupportsChatGetResponse protocol."""

    def __init__(self) -> None:
        # Per-instance dict. A class-level `additional_properties: dict = {}`
        # would be a single shared object mutated by every instance — the
        # classic mutable-class-attribute pitfall.
        self.additional_properties: dict = {}

    def get_response(self, messages, *, stream=False, options=None, **kwargs):
        """Return a canned response; a stream of updates when stream=True."""
        if stream:
            async def _stream():
                yield ChatResponseUpdate(
                    role="assistant",
                    contents=[{"type": "text", "text": "Hello!"}]
                )
            return ResponseStream(_stream())
        else:
            async def _response():
                return ChatResponse(
                    messages=[Message(role="assistant", text="Hello!")],
                    response_id="custom"
                )
            return _response()
# Verify protocol compatibility: SupportsChatGetResponse is presumably
# @runtime_checkable (isinstance() on a Protocol requires it — confirm),
# so this performs a structural (duck-typed) check, not an inheritance check.
client = CustomChatClient()
assert isinstance(client, SupportsChatGetResponse)
BaseChatClient
Abstract base class for chat clients without middleware wrapping.
from agent_framework import BaseChatClient, ChatResponse, Message
Constructor
class CustomChatClient(BaseChatClient):
    """Skeleton subclass: only _inner_get_response must be implemented."""

    async def _inner_get_response(self, *, messages, stream, options, **kwargs):
        # Implementation required
        if stream:
            # Return AsyncIterable[ChatResponseUpdate] or ResponseStream
            pass
        else:
            # Return ChatResponse
            pass
# additional_properties is stored on the client for provider-specific data
# (per the constructor docs; keyword arguments are merged into it).
client = CustomChatClient(
    additional_properties={"api_key": "sk-..."}
)
Additional properties for the client.
Additional keyword arguments merged into additional_properties.
Class Variables
Provider name for OpenTelemetry tracing. Override in subclasses.
Whether this client stores conversation history server-side by default. Clients that use server-side storage (e.g., OpenAI Responses API) should override to True.
Abstract Methods
_inner_get_response()
Internal method to be implemented by derived classes.
async def _inner_get_response(
    self,
    *,
    messages: Sequence[Message],
    stream: bool,
    options: Mapping[str, Any],
    **kwargs: Any
) -> Awaitable[ChatResponse] | ResponseStream[ChatResponseUpdate, ChatResponse]:
    """Send a chat request to the AI service.

    Args:
        messages: The prepared chat messages to send.
        stream: Whether to stream the response.
        options: The options dict for the request; call _validate_options()
            on it before use.
        **kwargs: Additional keyword arguments.

    Returns:
        When stream=False: an awaitable ChatResponse from the model.
        When stream=True: a ResponseStream of ChatResponseUpdate instances.
    """
    # Validate options first
    validated_options = await self._validate_options(options)
    if stream:
        # Return streaming response
        async def _stream():
            yield ChatResponseUpdate(...)
        return self._build_response_stream(_stream())
    else:
        # Return non-streaming response
        return ChatResponse(...)
messages
Sequence[Message]
required
The prepared chat messages to send.
Whether to stream the response.
options
Mapping[str, Any]
required
The options dict for the request. Call _validate_options() first.
Additional keyword arguments.
return
Awaitable[ChatResponse] | ResponseStream[ChatResponseUpdate, ChatResponse]
When stream=False: An Awaitable ChatResponse from the model. When stream=True: A ResponseStream of ChatResponseUpdate instances.
Helper Methods
_validate_options()
Validate and normalize chat options.
validated = await self._validate_options(options)
options
Mapping[str, Any]
required
The raw options dict.
The validated and normalized options dict.
_build_response_stream()
Create a ResponseStream with the standard finalizer.
stream = self._build_response_stream(
_async_generator(),
response_format=MyModel
)
stream
AsyncIterable[ChatResponseUpdate] | Awaitable[AsyncIterable[ChatResponseUpdate]]
required
The async iterable of updates, or an awaitable that resolves to one.
Optional response format for structured output.
return
ResponseStream[ChatResponseUpdate, ChatResponse]
A ResponseStream with automatic finalization.
Public Methods
get_response()
Get a response from the chat client.
response = await client.get_response(
messages=[Message(role="user", text="Hello")],
stream=False,
options={"temperature": 0.7, "max_tokens": 500}
)
messages
Sequence[Message]
required
The messages to send to the model.
Whether to stream the response.
Chat options as a TypedDict.
Additional keyword arguments for function-specific parameters.
return
Awaitable[ChatResponse] | ResponseStream[ChatResponseUpdate, ChatResponse]
When streaming: a response stream of ChatResponseUpdates. Otherwise: an Awaitable ChatResponse.
as_agent()
Create an Agent with this client.
agent = client.as_agent(
name="assistant",
instructions="You are a helpful assistant.",
tools=[get_weather],
default_options={"temperature": 0.7}
)
The unique identifier for the agent.
A brief description of the agent’s purpose.
Instructions sent to the chat client as a system message.
tools
ToolTypes | Callable | Sequence[ToolTypes | Callable] | None
The tools to use for the agent.
Default chat options. Enables IDE autocomplete for provider-specific options.
context_providers
Sequence[BaseContextProvider] | None
Context providers to include during agent invocation.
middleware
Sequence[MiddlewareTypes] | None
List of middleware to intercept agent and function invocations.
Additional keyword arguments stored as additional_properties.
An Agent instance configured with this chat client.
Example: Custom Chat Client Implementation
from agent_framework import BaseChatClient, ChatResponse, Message
from agent_framework import ChatResponseUpdate
from collections.abc import AsyncIterable
class CustomChatClient(BaseChatClient):
    """Example BaseChatClient subclass that returns canned responses."""

    def __init__(self, api_key: str, **kwargs):
        # Stash the key in additional_properties via the base constructor.
        super().__init__(
            additional_properties={"api_key": api_key},
            **kwargs
        )

    async def _inner_get_response(
        self,
        *,
        messages,
        stream,
        options,
        **kwargs
    ):
        # Options must be validated before the request is built.
        validated = await self._validate_options(options)

        if not stream:
            # Non-streaming implementation
            return ChatResponse(
                messages=[Message(role="assistant", text="Hello!")],
                response_id="custom-response"
            )

        # Streaming implementation
        async def _updates():
            yield ChatResponseUpdate(
                role="assistant",
                contents=[{"type": "text", "text": "Hello!"}]
            )

        return self._build_response_stream(_updates())
# Create and use the client
# NOTE: `await` requires an async context (e.g. inside an `async def main()`
# driven by asyncio.run); it is shown at top level here for brevity.
client = CustomChatClient(api_key="sk-...")
response = await client.get_response(
    [Message(role="user", text="Hello, how are you?")]
)
# Or create an agent
agent = client.as_agent(
    name="assistant",
    instructions="You are helpful."
)
Protocols for runtime checking of tool support capabilities.
from agent_framework import SupportsCodeInterpreterTool
if isinstance(client, SupportsCodeInterpreterTool):
tool = client.get_code_interpreter_tool()
agent = Agent(client, tools=[tool])
from agent_framework import SupportsWebSearchTool
if isinstance(client, SupportsWebSearchTool):
tool = client.get_web_search_tool()
agent = Agent(client, tools=[tool])
from agent_framework import SupportsImageGenerationTool
if isinstance(client, SupportsImageGenerationTool):
tool = client.get_image_generation_tool()
agent = Agent(client, tools=[tool])
from agent_framework import SupportsMCPTool
if isinstance(client, SupportsMCPTool):
tool = client.get_mcp_tool(name="my_mcp", url="https://...")
agent = Agent(client, tools=[tool])
from agent_framework import SupportsFileSearchTool
if isinstance(client, SupportsFileSearchTool):
tool = client.get_file_search_tool(vector_store_ids=["vs_123"])
agent = Agent(client, tools=[tool])
Embedding Clients
SupportsGetEmbeddings
Protocol for embedding generation.
from agent_framework import SupportsGetEmbeddings
async def use_embeddings(client: SupportsGetEmbeddings) -> None:
    """Generate embeddings for a sample input and print each vector."""
    result = await client.get_embeddings(["Hello, world!"])
    for item in result:
        print(item.vector)
BaseEmbeddingClient
Abstract base class for embedding clients.
from agent_framework import BaseEmbeddingClient, Embedding, GeneratedEmbeddings
class CustomEmbeddingClient(BaseEmbeddingClient):
    """Embedding client returning a fixed 3-dimensional vector per input."""

    async def get_embeddings(self, values, *, options=None):
        # One placeholder embedding per input value.
        embeddings = [Embedding(vector=[0.1, 0.2, 0.3]) for _ in values]
        return GeneratedEmbeddings(embeddings)