Skip to content

Commit 45d2e01

Browse files
authored
types: add tool_name to message (#229)
1 parent 603457c commit 45d2e01

File tree

3 files changed

+144
-0
lines changed

3 files changed

+144
-0
lines changed

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,7 @@ ollama.chat(request)
6363
- `role` `<string>`: The role of the message sender ('user', 'system', or 'assistant').
6464
- `content` `<string>`: The content of the message.
6565
- `images` `<Uint8Array[] | string[]>`: (Optional) Images to be included in the message, either as Uint8Array or base64 encoded strings.
66+
- `tool_name` `<string>`: (Optional) The name of the tool that produced this result, letting the model know which tool's output it is receiving.
6667
- `format` `<string>`: (Optional) Set the expected format of the response (`json`).
6768
- `stream` `<boolean>`: (Optional) When true an `AsyncGenerator` is returned.
6869
- `think` `<boolean>`: (Optional) When true, the model will think about the response before responding. Requires thinking support from the model.

examples/tools/multi-tool.ts

Lines changed: 142 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
import ollama from 'ollama';
2+
3+
// Mock weather functions
4+
function getTemperature(args: { city: string }): string {
5+
const validCities = ['London', 'Paris', 'New York', 'Tokyo', 'Sydney'];
6+
7+
if (!validCities.includes(args.city)) {
8+
return 'Unknown city';
9+
}
10+
11+
return `${Math.floor(Math.random() * 36)} degrees Celsius`;
12+
}
13+
14+
function getConditions(args: { city: string }): string {
15+
const validCities = ['London', 'Paris', 'New York', 'Tokyo', 'Sydney'];
16+
17+
if (!validCities.includes(args.city)) {
18+
return 'Unknown city';
19+
}
20+
21+
const conditions = ['sunny', 'cloudy', 'rainy', 'snowy'];
22+
return conditions[Math.floor(Math.random() * conditions.length)];
23+
}
24+
25+
// Tool definitions
// JSON-schema-style tool description advertising getTemperature to the model.
// The `name` must match the key used in availableFunctions so the returned
// tool call can be dispatched to the local implementation.
const getTemperatureTool = {
  type: 'function',
  function: {
    name: 'getTemperature',
    description: 'Get the temperature for a city in Celsius',
    parameters: {
      type: 'object',
      required: ['city'],
      properties: {
        city: { type: 'string', description: 'The name of the city' }
      }
    }
  }
};
40+
41+
// JSON-schema-style tool description advertising getConditions to the model.
// The `name` must match the key used in availableFunctions so the returned
// tool call can be dispatched to the local implementation.
const getConditionsTool = {
  type: 'function',
  function: {
    name: 'getConditions',
    description: 'Get the weather conditions for a city',
    parameters: {
      type: 'object',
      required: ['city'],
      properties: {
        city: { type: 'string', description: 'The name of the city' }
      }
    }
  }
};
55+
56+
async function run(model: string) {
57+
const cities = ['London', 'Paris', 'New York', 'Tokyo', 'Sydney'];
58+
const city = cities[Math.floor(Math.random() * cities.length)];
59+
const city2 = cities[Math.floor(Math.random() * cities.length)];
60+
61+
const messages = [{
62+
role: 'user',
63+
content: `What is the temperature in ${city}? and what are the weather conditions in ${city2}?`
64+
}];
65+
console.log('----- Prompt:', messages[0].content, '\n');
66+
67+
const ollama = new Ollama();
68+
const availableFunctions = {
69+
getTemperature,
70+
getConditions
71+
};
72+
73+
const response = await ollama.chat({
74+
model: model,
75+
messages: messages,
76+
tools: [getTemperatureTool, getConditionsTool],
77+
stream: true,
78+
think: true
79+
});
80+
81+
for await (const chunk of response) {
82+
if (chunk.message.thinking) {
83+
process.stdout.write(chunk.message.thinking);
84+
}
85+
if (chunk.message.content) {
86+
process.stdout.write(chunk.message.content);
87+
}
88+
if (chunk.message.tool_calls) {
89+
for (const tool of chunk.message.tool_calls) {
90+
const functionToCall = availableFunctions[tool.function.name];
91+
if (functionToCall) {
92+
console.log('\nCalling function:', tool.function.name, 'with arguments:', tool.function.arguments);
93+
const output = functionToCall(tool.function.arguments);
94+
console.log('> Function output:', output, '\n');
95+
96+
messages.push(chunk.message);
97+
messages.push({
98+
role: 'tool',
99+
content: output.toString(),
100+
tool_name: tool.function.name,
101+
});
102+
} else {
103+
console.log('Function', tool.function.name, 'not found');
104+
}
105+
}
106+
}
107+
}
108+
109+
console.log('----- Sending result back to model \n');
110+
111+
if (messages.some(msg => msg.role === 'tool')) {
112+
const finalResponse = await ollama.chat({
113+
model: model,
114+
messages: messages,
115+
tools: [getTemperatureTool, getConditionsTool],
116+
stream: true,
117+
think: true
118+
});
119+
120+
let doneThinking = false;
121+
for await (const chunk of finalResponse) {
122+
if (chunk.message.thinking) {
123+
process.stdout.write(chunk.message.thinking);
124+
}
125+
if (chunk.message.content) {
126+
if (!doneThinking) {
127+
console.log('\n----- Final result:');
128+
doneThinking = true;
129+
}
130+
process.stdout.write(chunk.message.content);
131+
}
132+
if (chunk.message.tool_calls) {
133+
console.log('Model returned tool calls:');
134+
console.log(chunk.message.tool_calls);
135+
}
136+
}
137+
} else {
138+
console.log('No tool calls returned');
139+
}
140+
}
141+
142+
// Entry point: run the example against a thinking-capable model.
run('qwen3').catch(console.error);

src/interfaces.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ export interface Message {
6767
thinking?: string
6868
images?: Uint8Array[] | string[]
6969
tool_calls?: ToolCall[]
70+
tool_name?: string
7071
}
7172

7273
export interface ToolCall {

0 commit comments

Comments
 (0)