# pip install friendli
import json
import os
from friendli import SyncFriendli
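
# Read the personal access token from the environment; the string literal is
# only a placeholder fallback for quick local experiments.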
token = os.environ.get("FRIENDLI_TOKEN") or "YOUR_FRIENDLI_TOKEN"
client = SyncFriendli(token=token)
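
# Start the conversation with a single user message.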
user_prompt = "I live in New York. What should I wear for today's weather?"
messages = [
    {
        "role": "user",
        "content": user_prompt,
    },
]
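
# Advertise one tool, get_temperature, described with a JSON Schema so the
# model knows its name, purpose, and accepted arguments.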
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_temperature",
            "description": "Get the temperature information in a given location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The name of the current location, e.g., New York",
                    },
                },
            },
        },
    },
]
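
# First request: with tools attached, the model is expected to answer with a
# tool call rather than plain text.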
chat = client.serverless.chat.complete(
    model="meta-llama-3.3-70b-instruct",
    messages=messages,
    tools=tools,
    temperature=0,
    frequency_penalty=1,
)
def get_temperature(location: str) -> int:
    """Mock function that returns the city temperature."""
    if "new york" in location.lower():
        return 45
    if "san francisco" in location.lower():
        return 72
    return 30
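
# Parse the arguments from the first tool call and run the tool locally
# (this assumes the model did emit a tool call in its response).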
func_kwargs = json.loads(chat.choices[0].message.tool_calls[0].function.arguments)
temperature_info = get_temperature(**func_kwargs)
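
# Echo the assistant's tool-call message back into the conversation so the
# model can see which call the upcoming result belongs to.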
messages.append(
    {
        "role": "assistant",
        "tool_calls": [
            tool_call.model_dump()
            for tool_call in chat.choices[0].message.tool_calls
        ],
    }
)
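
# Attach the tool's result, linked back to the call via tool_call_id.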
messages.append(
    {
        "role": "tool",
        "content": str(temperature_info),
        "tool_call_id": chat.choices[0].message.tool_calls[0].id,
    }
)
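
# Second request: with the tool result in context, the model can now answer
# the original question in natural language.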
chat_w_info = client.serverless.chat.complete(
    model="meta-llama-3.3-70b-instruct",
    tools=tools,
    messages=messages,
)
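
# Print the final, tool-informed answer.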
for choice in chat_w_info.choices:
    print(choice.message.content)