import json
from writerai import Writer

# Initialize the Writer client. If you don't pass the `api_key` parameter,
# the client looks for the `WRITER_API_KEY` environment variable.
client = Writer()

def calculate_mean(numbers: list) -> float:
    return sum(numbers) / len(numbers)
tools = [
    {
        "type": "function",
        "function": {
            "name": "calculate_mean",
            "description": "Calculate the mean (average) of a list of numbers.",
            "parameters": {
                "type": "object",
                "properties": {
                    "numbers": {
                        "type": "array",
                        "items": {"type": "number"},
                        "description": "List of numbers"
                    }
                },
                "required": ["numbers"]
            }
        }
    }
]
messages = [{"role": "user", "content": "what is the mean of [1,3,5,7,9]?"}]
# Step 1: Initial request with tools
response = client.chat.chat(
    model="palmyra-x5",
    messages=messages,
    tools=tools,
    tool_choice="auto",
    stream=True
)
# Step 2: Process streaming response to collect tool calls
streaming_content = ""
function_calls = []
for chunk in response:
    choice = chunk.choices[0]
    if choice.delta:
        # Collect tool calls as they stream in
        if choice.delta.tool_calls:
            for tool_call in choice.delta.tool_calls:
                if tool_call.id:
                    # Start a new function call
                    function_calls.append({
                        "name": "",
                        "arguments": "",
                        "call_id": tool_call.id
                    })
                if tool_call.function:
                    # Append to the most recent function call
                    if function_calls:
                        function_calls[-1]["name"] += tool_call.function.name or ""
                        function_calls[-1]["arguments"] += tool_call.function.arguments or ""
        # Collect regular content (for cases where no tools are called)
        elif choice.delta.content:
            streaming_content += choice.delta.content

    # Check if streaming is complete
    if choice.finish_reason:
        if choice.finish_reason == "stop":
            # No tools were called, just a regular response
            print(f"Response: {streaming_content}")
            messages.append({"role": "assistant", "content": streaming_content})
            break
        elif choice.finish_reason == "tool_calls":
            # Step 3: Reconstruct and append the assistant message with tool calls
            tool_calls_for_message = []
            for func_call in function_calls:
                tool_calls_for_message.append({
                    "id": func_call["call_id"],
                    "type": "function",
                    "function": {
                        "name": func_call["name"],
                        "arguments": func_call["arguments"]
                    }
                })
            assistant_message = {
                "role": "assistant",
                "content": None,
                "tool_calls": tool_calls_for_message
            }
            messages.append(assistant_message)  # Append assistant response

            # Step 4: Execute each function and add results to messages
            for function_call in function_calls:
                function_name = function_call["name"]
                if function_name == "calculate_mean":
                    try:
                        arguments_dict = json.loads(function_call["arguments"])
                        function_response = calculate_mean(arguments_dict["numbers"])
                        # Add tool response to messages
                        messages.append({
                            "role": "tool",
                            "content": str(function_response),
                            "tool_call_id": function_call["call_id"],
                            "name": function_name,
                        })
                    except Exception as e:
                        # Handle errors gracefully
                        messages.append({
                            "role": "tool",
                            "content": f"Error: {str(e)}",
                            "tool_call_id": function_call["call_id"],
                            "name": function_name,
                        })

            # Step 5: Get the final response from the model
            final_response = client.chat.chat(
                model="palmyra-x5",
                messages=messages,
                stream=True
            )
            final_content = ""
            for chunk in final_response:
                choice = chunk.choices[0]
                if choice.delta and choice.delta.content:
                    final_content += choice.delta.content
                    print(choice.delta.content, end="", flush=True)

            # Step 6: Add final response to message history
            messages.append({
                "role": "assistant",
                "content": final_content
            })
            break