Creating a simple text agent with a function call
In Yandex Cloud AI Studio, you can create a text agent with a function call that can communicate with the user in text format and maintain a dialogue closely resembling natural human interaction.
To use the example, you will need a service account with the ai.assistants.editor and ai.languageModels.user roles, and an API key with the yc.ai.foundationModels.execute scope. An API key created in AI Studio already has these permissions. Refer to the Getting started section for an example of how to configure your runtime environment.
Build a chat
Python
Node.js
Go
import openai
from openai import OpenAI
import json
# Connection settings for the Yandex Cloud AI Studio OpenAI-compatible API.
YANDEX_MODEL = "yandexgpt"  # model that will handle the requests
YANDEX_API_KEY = "<API_key>"  # API key with the yc.ai.foundationModels.execute scope
YANDEX_FOLDER_ID = "<folder_ID>"  # ID of the service account's folder

client = openai.OpenAI(
    api_key=YANDEX_API_KEY,
    base_url="https://llm.api.cloud.yandex.net/v1",  # OpenAI-compatible endpoint
    project=YANDEX_FOLDER_ID
)
# 1. Listing the functions the model can call.
# JSON-schema description of the single `city` argument, kept separate
# for readability.
_city_param = {
    "type": "string",
    "description": "City, e.g., Petersburg or Moscow",
}

tools = [
    {
        "type": "function",
        "name": "get_weather",
        "description": "Getting current weather for the city you specify.",
        "parameters": {
            "type": "object",
            "properties": {"city": _city_param},
            "required": ["city"],
        },
    },
]
# The simplest implementation of the function (can be replaced with a call to a real API)
def get_weather(city):
    """Return stub weather data for *city*.

    Replace this with a real integration, e.g., with Yandex Weather.
    """
    report = {"city": city}
    report["temperature"] = "12 °C"
    report["condition"] = "Cloudy, light breeze"
    return report
# Forming the conversation history; it is extended below with the model's
# output and the tool results.
input_list = [
    {"role": "user", "content": "What is the weather like in Krasnoyarsk?"}
]

# 2. Requesting a model with specific features (the declared tools)
response = client.responses.create(
    model=f"gpt://{YANDEX_FOLDER_ID}/{YANDEX_MODEL}",
    tools=tools,
    input=input_list,
)

# Adding the model's output (including any function-call items) to history
input_list += response.output

for item in response.output:
    if item.type == "function_call":
        if item.name == "get_weather":
            # 3. Executing the get_weather function with the
            # model-supplied JSON arguments
            weather_info = get_weather(**json.loads(item.arguments))
            # 4. Providing the result of the function back to the model
            input_list.append({
                "type": "function_call_output",
                "call_id": item.call_id,  # ties this output to the model's call
                "output": json.dumps(weather_info)
            })

print("Final input:")
for item in input_list:
    # Tool results are plain dicts; other history items may be SDK objects.
    if isinstance(item, dict) and item.get("type") == "function_call_output":
        parsed = json.loads(item["output"])
        print("function_call_output:", parsed)
    else:
        print(item)

# Second request: the model now sees the tool output and can answer.
response = client.responses.create(
    model=f"gpt://{YANDEX_FOLDER_ID}/{YANDEX_MODEL}",
    instructions="Respond only with the weather data returned by the function.",
    tools=tools,
    input=input_list,
)

# 5. Final response
print("Final output:")
print(response.model_dump_json(indent=2))
print("\n" + response.output_text)
Where:
YANDEX_API_KEY: API key for access to AI Studio.
YANDEX_FOLDER_ID: Service account folder ID.
YANDEX_MODEL: Model name to handle the request.
instructions: System prompt shaping the model's behavior when generating the final response.
Response example:
Final input:
{'role': 'user', 'content': 'What is the weather like in Krasnoyarsk?'}
ResponseFunctionToolCall(arguments='{"city":"Krasnoyarsk"}', call_id='get_weather', name='get_weather', type='function_call', id='get_weather', status='completed', valid=True)
function_call_output: {'city': 'Krasnoyarsk', 'temperature': '12 °C', 'condition': 'Cloudy, light breeze'}
Final output:
{
"id": "70d96fac-1c4b-4f4a-9f80-56df********",
"created_at": 1758556157206.0,
"error": null,
"incomplete_details": null,
"instructions": "Respond only with the weather data returned by the function.",
"metadata": null,
"model": "gpt://b1gstllj8rgs********/yandexgpt",
"object": "response",
"output": [
{
"id": "f15c66e8-99a2-4647-a820-406e********",
"content": [
{
"annotations": [],
"text": "It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.",
"type": "output_text",
"logprobs": null,
"valid": true
}
],
"role": "assistant",
"status": "completed",
"type": "message",
"valid": true
}
],
"parallel_tool_calls": true,
"temperature": null,
"tool_choice": "auto",
"tools": [
{
"name": "get_weather",
"parameters": {
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "City, e.g., Petersburg or Moscow"
}
},
"required": [
"city"
]
},
"strict": null,
"type": "function",
"description": "Getting current weather for the city you specify.",
"valid": true
}
],
"top_p": null,
"background": false,
"conversation": null,
"max_output_tokens": null,
"max_tool_calls": null,
"previous_response_id": null,
"prompt": null,
"prompt_cache_key": null,
"reasoning": null,
"safety_identifier": null,
"service_tier": null,
"status": "completed",
"text": null,
"top_logprobs": null,
"truncation": null,
"usage": null,
"user": "",
"valid": true
}
It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.
// OpenAI Node SDK client pointed at the Yandex Cloud AI Studio
// OpenAI-compatible endpoint.
const OpenAI = require("openai");

const YANDEX_MODEL = "yandexgpt"; // model that will handle the requests
const YANDEX_API_KEY = "<API_key>"; // API key with the yc.ai.foundationModels.execute scope
const YANDEX_FOLDER_ID = "<folder_ID>"; // ID of the service account's folder

const client = new OpenAI({
  apiKey: YANDEX_API_KEY,
  baseURL: "https://llm.api.cloud.yandex.net/v1", // OpenAI-compatible endpoint
  project: YANDEX_FOLDER_ID,
});
// 1. Listing the functions the model can call.
// JSON-schema description of the single `city` argument, kept separate
// for readability.
const cityParameter = {
  type: "string",
  description: "City, e.g., Petersburg or Moscow",
};

const tools = [
  {
    type: "function",
    name: "get_weather",
    description: "Getting current weather for the city you specify.",
    parameters: {
      type: "object",
      properties: { city: cityParameter },
      required: ["city"],
    },
  },
];
// The simplest implementation of the function (can be replaced with a call to a real API)
// Returns stub weather data for the given city; integrate with a real
// service (e.g., Yandex Weather) here.
function getWeather(city) {
  const report = { city };
  report.temperature = "12 °C";
  report.condition = "Cloudy, light breeze";
  return report;
}
// Drives the two-step function-calling dialogue: first request lets the
// model emit a function call, the tool result is appended to history, and
// the second request produces the final answer.
async function main() {
  // Forming the conversation history; extended below with model output
  // and tool results.
  let inputList = [
    { role: "user", content: "What is the weather like in Krasnoyarsk?" },
  ];
  // 2. Requesting a model with specific features (the declared tools)
  const response = await client.responses.create({
    model: `gpt://${YANDEX_FOLDER_ID}/${YANDEX_MODEL}`,
    tools: tools,
    input: inputList,
  });
  // Adding the model's output (including any function-call items) to history
  inputList = inputList.concat(response.output);
  for (const item of response.output) {
    if (item.type === "function_call") {
      if (item.name === "get_weather") {
        // 3. Executing the `getWeather` function with the
        // model-supplied JSON arguments
        const args = JSON.parse(item.arguments);
        const weatherInfo = getWeather(args.city);
        // 4. Providing the result of the function back to the model
        inputList.push({
          type: "function_call_output",
          call_id: item.call_id, // ties this output to the model's call
          output: JSON.stringify(weatherInfo),
        });
      }
    }
  }
  console.log("Final input:");
  for (const item of inputList) {
    // Tool results are plain objects; other history items may be SDK objects.
    if (
      typeof item === "object" &&
      item !== null &&
      item.type === "function_call_output"
    ) {
      const parsed = JSON.parse(item.output);
      console.log("function_call_output:", parsed);
    } else {
      console.log(item);
    }
  }
  // Second request: the model now sees the tool output and can answer.
  const finalResponse = await client.responses.create({
    model: `gpt://${YANDEX_FOLDER_ID}/${YANDEX_MODEL}`,
    instructions: "Respond only with the weather data returned by the function.",
    tools: tools,
    input: inputList,
  });
  // 5. Final response
  console.log("Final output:");
  console.log(JSON.stringify(finalResponse, null, 2));
  console.log("\n" + finalResponse.output_text);
}

main().catch(console.error);
Where:
YANDEX_API_KEY: API key for access to AI Studio.
YANDEX_FOLDER_ID: Service account folder ID.
YANDEX_MODEL: Model name to handle the request.
instructions: System prompt shaping the model's behavior when generating the final response.
Response example:
Final input:
{'role': 'user', 'content': 'What is the weather like in Krasnoyarsk?'}
ResponseFunctionToolCall(arguments='{"city":"Krasnoyarsk"}', call_id='get_weather', name='get_weather', type='function_call', id='get_weather', status='completed', valid=True)
function_call_output: {'city': 'Krasnoyarsk', 'temperature': '12 °C', 'condition': 'Cloudy, light breeze'}
Final output:
{
"id": "70d96fac-1c4b-4f4a-9f80-56df********",
"created_at": 1758556157206.0,
"error": null,
"incomplete_details": null,
"instructions": "Respond only with the weather data returned by the function.",
"metadata": null,
"model": "gpt://b1gstllj8rgs********/yandexgpt",
"object": "response",
"output": [
{
"id": "f15c66e8-99a2-4647-a820-406e********",
"content": [
{
"annotations": [],
"text": "It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.",
"type": "output_text",
"logprobs": null,
"valid": true
}
],
"role": "assistant",
"status": "completed",
"type": "message",
"valid": true
}
],
"parallel_tool_calls": true,
"temperature": null,
"tool_choice": "auto",
"tools": [
{
"name": "get_weather",
"parameters": {
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "City, e.g., Petersburg or Moscow"
}
},
"required": [
"city"
]
},
"strict": null,
"type": "function",
"description": "Getting current weather for the city you specify.",
"valid": true
}
],
"top_p": null,
"background": false,
"conversation": null,
"max_output_tokens": null,
"max_tool_calls": null,
"previous_response_id": null,
"prompt": null,
"prompt_cache_key": null,
"reasoning": null,
"safety_identifier": null,
"service_tier": null,
"status": "completed",
"text": null,
"top_logprobs": null,
"truncation": null,
"usage": null,
"user": "",
"valid": true
}
It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/openai/openai-go/responses"
)
// Connection settings for the Yandex Cloud AI Studio OpenAI-compatible API.
const (
	YANDEX_MODEL     = "yandexgpt"   // model that will handle the requests
	YANDEX_API_KEY   = "<API_key>"   // API key with the yc.ai.foundationModels.execute scope
	YANDEX_FOLDER_ID = "<folder_ID>" // ID of the service account's folder
)
// 1. Identifying a tool: the JSON-schema description of get_weather that
// the model is allowed to call.
var tools = []responses.ToolUnionParam{
	{
		OfFunction: &responses.FunctionToolParam{
			Name:        "get_weather",
			Description: openai.String("Getting current weather for the city you specify."),
			Parameters: map[string]any{
				"type": "object",
				"properties": map[string]any{
					"city": map[string]any{
						"type":        "string",
						"description": "City, e.g., Petersburg or Moscow",
					},
				},
				"required": []string{"city"},
			},
			// Non-strict schema validation for the function arguments.
			Strict: openai.Bool(false),
		},
	},
}
// getWeather is the simplest stub implementation of the tool; replace it
// with a call to a real weather API in production.
func getWeather(city string) map[string]string {
	report := map[string]string{"city": city}
	report["temperature"] = "12 °C"
	report["condition"] = "Cloudy, light breeze"
	return report
}
// main drives the two-step function-calling dialogue: the first request
// lets the model emit a function call, the tool result is appended to the
// history, and the second request produces the final answer.
func main() {
	// Client configured for the Yandex Cloud OpenAI-compatible endpoint.
	client := openai.NewClient(
		option.WithAPIKey(YANDEX_API_KEY),
		option.WithBaseURL("https://llm.api.cloud.yandex.net/v1"),
		option.WithProject(YANDEX_FOLDER_ID),
	)
	model := fmt.Sprintf("gpt://%s/%s", YANDEX_FOLDER_ID, YANDEX_MODEL)
	// Forming the conversation history; extended below with model output
	// and tool results.
	inputList := []responses.ResponseInputItemUnionParam{
		{
			OfMessage: &responses.EasyInputMessageParam{
				Role: responses.EasyInputMessageRoleUser,
				Type: "message",
				Content: responses.EasyInputMessageContentUnionParam{
					OfString: openai.String("What is the weather like in Krasnoyarsk?"),
				},
			},
		},
	}
	// 2. Requesting a model with tools
	r1, err := client.Responses.New(context.Background(), responses.ResponseNewParams{
		Model: model,
		Tools: tools,
		Input: responses.ResponseNewParamsInputUnion{
			OfInputItemList: inputList,
		},
	})
	if err != nil {
		log.Fatalf("Error r1: %v", err)
	}
	// Adding the model's output to history. Output items must be converted
	// back into input params before they can be resent to the API.
	for _, item := range r1.Output {
		switch item.Type {
		case "function_call":
			fc := item.AsFunctionCall()
			inputList = append(inputList, responses.ResponseInputItemUnionParam{
				OfFunctionCall: &responses.ResponseFunctionToolCallParam{
					ID:        openai.String(fc.ID),
					CallID:    fc.CallID,
					Name:      fc.Name,
					Arguments: fc.Arguments,
				},
			})
		case "message":
			msg := item.AsMessage()
			// Converting each `content` part into an output-text input param.
			var content []responses.ResponseOutputMessageContentUnionParam
			for _, c := range msg.Content {
				content = append(content, responses.ResponseOutputMessageContentUnionParam{
					OfOutputText: &responses.ResponseOutputTextParam{
						Text: c.AsOutputText().Text,
					},
				})
			}
			inputList = append(inputList, responses.ResponseInputItemUnionParam{
				OfOutputMessage: &responses.ResponseOutputMessageParam{
					ID:      msg.ID,
					Status:  responses.ResponseOutputMessageStatus(msg.Status),
					Content: content,
				},
			})
		}
	}
	// Handling the function calls requested by the model
	for _, item := range r1.Output {
		if item.Type == "function_call" {
			fc := item.AsFunctionCall()
			if fc.Name == "get_weather" {
				// 3. Parsing the JSON arguments and running the function
				var args struct {
					City string `json:"city"`
				}
				if err := json.Unmarshal([]byte(fc.Arguments), &args); err != nil {
					log.Fatalf("Error parsing arguments: %v", err)
				}
				weatherInfo := getWeather(args.City)
				weatherJSON, _ := json.Marshal(weatherInfo)
				// 4. Providing the result back through the SDK helper,
				// keyed by the call ID so the model can match it.
				inputList = append(inputList,
					responses.ResponseInputItemParamOfFunctionCallOutput(fc.CallID, string(weatherJSON)),
				)
			}
		}
	}
	// Printing the final input (the full history sent to the model)
	fmt.Println("Final input:")
	for _, item := range inputList {
		itemJSON, _ := json.MarshalIndent(item, "", " ")
		fmt.Println(string(itemJSON))
	}
	// 5. Final request: the model now sees the tool output and can answer.
	r2, err := client.Responses.New(context.Background(), responses.ResponseNewParams{
		Model:        model,
		Instructions: openai.String("Respond only with the weather data returned by the function."),
		Tools:        tools,
		Input: responses.ResponseNewParamsInputUnion{
			OfInputItemList: inputList,
		},
	})
	if err != nil {
		log.Fatalf("Error r2: %v", err)
	}
	// Final response
	fmt.Println("\nFinal output:")
	finalJSON, _ := json.MarshalIndent(r2, "", " ")
	fmt.Println(string(finalJSON))
	fmt.Println("\n" + r2.OutputText())
}
Where:
YANDEX_API_KEY: API key for access to AI Studio.
YANDEX_FOLDER_ID: Service account folder ID.
YANDEX_MODEL: Model name to handle the request.
instructions: System prompt shaping the model's behavior when generating the final response.
Response example:
Final input:
{'role': 'user', 'content': 'What is the weather like in Krasnoyarsk?'}
ResponseFunctionToolCall(arguments='{"city":"Krasnoyarsk"}', call_id='get_weather', name='get_weather', type='function_call', id='get_weather', status='completed', valid=True)
function_call_output: {'city': 'Krasnoyarsk', 'temperature': '12 °C', 'condition': 'Cloudy, light breeze'}
Final output:
{
"id": "70d96fac-1c4b-4f4a-9f80-56df********",
"created_at": 1758556157206.0,
"error": null,
"incomplete_details": null,
"instructions": "Respond only with the weather data returned by the function.",
"metadata": null,
"model": "gpt://b1gstllj8rgs********/yandexgpt",
"object": "response",
"output": [
{
"id": "f15c66e8-99a2-4647-a820-406e********",
"content": [
{
"annotations": [],
"text": "It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.",
"type": "output_text",
"logprobs": null,
"valid": true
}
],
"role": "assistant",
"status": "completed",
"type": "message",
"valid": true
}
],
"parallel_tool_calls": true,
"temperature": null,
"tool_choice": "auto",
"tools": [
{
"name": "get_weather",
"parameters": {
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "City, e.g., Petersburg or Moscow"
}
},
"required": [
"city"
]
},
"strict": null,
"type": "function",
"description": "Getting current weather for the city you specify.",
"valid": true
}
],
"top_p": null,
"background": false,
"conversation": null,
"max_output_tokens": null,
"max_tool_calls": null,
"previous_response_id": null,
"prompt": null,
"prompt_cache_key": null,
"reasoning": null,
"safety_identifier": null,
"service_tier": null,
"status": "completed",
"text": null,
"top_logprobs": null,
"truncation": null,
"usage": null,
"user": "",
"valid": true
}
It is currently plus 12°C in Krasnoyarsk, cloudy, light breeze.