-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtool-calling.ts
More file actions
280 lines (244 loc) · 7.97 KB
/
tool-calling.ts
File metadata and controls
280 lines (244 loc) · 7.97 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
/**
* Example script demonstrating HelpingAI tool calling functionality.
*
* This script shows how to create and use tools with the HelpingAI SDK.
* Port of the Python tooluse.py example.
*/
import { HelpingAI, tools, getTools } from '../src';
/**
 * Generate a response using HelpingAI with tool calling support.
 *
 * Flow: send the prompt with the tool configuration; if the model requests
 * tool calls, execute each one via client.call(), append the results as
 * 'tool' messages, then make a second request so the model can compose a
 * final answer from the tool output.
 *
 * @param userPrompt - The user's input prompt
 * @param prints - Whether to print debug information
 * @returns The AI's response content
 * @throws Error wrapping any failure outside per-tool execution
 */
async function generate(userPrompt: string, prints: boolean = true): Promise<string> {
  // Create a client instance
  const client = new HelpingAI({
    apiKey: 'your-api-key', // Replace with your actual API key
  });

  // Define tools configuration - equivalent to Python version.
  // Mixes MCP server definitions with built-in tool names.
  const toolsConfig = [
    {
      mcpServers: {
        time: {
          command: 'uvx',
          args: ['mcp-server-time'],
        },
        fetch: {
          command: 'uvx',
          args: ['mcp-server-fetch'],
        },
        // Commented out like in Python version
        // 'ddg-search': {
        //   command: 'npx',
        //   args: ['-y', '@oevortex/ddg_search@latest']
        // }
      },
    },
    'code_interpreter',
    // 'web_search' // Commented out like in Python version
  ];

  // Conversation transcript; grows as tool calls and results are appended.
  const messages: Array<{
    role: 'user' | 'assistant' | 'system' | 'tool';
    content: string;
    tool_calls?: any[];
    tool_call_id?: string;
    name?: string;
  }> = [{ role: 'user', content: userPrompt }];

  try {
    // First round-trip: the model may answer directly or request tools.
    const response = await client.chat.completions.create({
      model: 'Dhanishtha-2.0-preview',
      messages,
      tools: toolsConfig,
      tool_choice: 'auto',
      stream: false,
      hide_think: true,
    });

    // Non-streaming responses carry a 'choices' array.
    if ('choices' in response) {
      const responseMessage = response.choices[0].message;
      const toolCalls = responseMessage.tool_calls;

      if (toolCalls && toolCalls.length > 0) {
        if (prints) {
          console.log(`Tool calls detected: ${toolCalls.length}`);
        }

        // Add the assistant's message with tool calls to conversation
        messages.push(responseMessage);

        // Process each tool call
        for (const toolCall of toolCalls) {
          const functionName = toolCall.function.name;
          try {
            // FIX: parse inside the try block. Previously a malformed
            // arguments string from the model threw past the per-tool
            // handler and aborted the whole conversation; now it is
            // reported back to the model as a tool error like any other
            // tool failure.
            const functionArgs = JSON.parse(toolCall.function.arguments);
            if (prints) {
              console.log(`Calling tool: ${functionName}`);
              console.log(`Arguments:`, functionArgs);
            }

            // Use HelpingAI's built-in tool calling mechanism
            const functionResponse = await client.call(functionName, functionArgs);
            if (prints) {
              console.log(`Tool response:`, functionResponse);
            }

            // Add tool response to messages
            messages.push({
              tool_call_id: toolCall.id,
              role: 'tool',
              name: functionName,
              content: String(functionResponse),
            });
          } catch (error) {
            const errorMessage = `Error executing tool ${functionName}: ${error instanceof Error ? error.message : String(error)}`;
            if (prints) {
              console.error(errorMessage);
            }
            // Add error response to messages so the model can recover.
            messages.push({
              tool_call_id: toolCall.id,
              role: 'tool',
              name: functionName,
              content: errorMessage,
            });
          }
        }

        // Get final response from HelpingAI after tool execution
        const secondResponse = await client.chat.completions.create({
          model: 'Dhanishtha-2.0-preview',
          messages,
          tools: toolsConfig,
          stream: false,
          hide_think: true,
        });
        if ('choices' in secondResponse) {
          return secondResponse.choices[0].message.content || '';
        }
      }
      // No tool calls (or second response lacked choices): return the
      // assistant's direct content.
      return responseMessage.content || '';
    }
    return 'No response received';
  } catch (error) {
    const errorMessage = `Error in generate function: ${error instanceof Error ? error.message : String(error)}`;
    if (prints) {
      console.error(errorMessage);
    }
    throw new Error(errorMessage);
  } finally {
    // Cleanup resources (MCP server processes etc.) regardless of outcome.
    await client.cleanup();
  }
}
/**
* Example of using the @tools decorator for custom tools
*/
/**
 * Calculate tip and total amount for a bill.
 *
 * Registered with the HelpingAI tool registry via the tools() wrapper so
 * the model can invoke it by name.
 *
 * @param billAmount - The original bill amount
 * @param tipPercentage - Tip percentage (default: 15.0)
 * @returns Breakdown containing the tip, grand total, and original amount
 */
const calculateTip = tools(function calculateTip(
  billAmount: number,
  tipPercentage: number = 15.0
): { tip: number; total: number; original: number } {
  const tipAmount = billAmount * (tipPercentage / 100);
  return {
    tip: tipAmount,
    total: billAmount + tipAmount,
    original: billAmount,
  };
});
/**
 * Get current weather information for a city.
 *
 * Mock implementation — a real version would query a weather API. Registered
 * with the HelpingAI tool registry via the tools() wrapper.
 *
 * @param city - The city name to get weather for
 * @param units - Temperature units (celsius or fahrenheit)
 * @returns A human-readable weather summary string
 */
const getWeather = tools(function getWeather(city: string, units: string = 'celsius'): string {
  let temp: string;
  if (units === 'fahrenheit') {
    temp = '72°F';
  } else {
    temp = '22°C';
  }
  return `Weather in ${city}: ${temp}, partly cloudy`;
});
/**
 * Example using custom tools registered with the @tools decorator.
 *
 * Pulls every registered tool (including calculateTip and getWeather above)
 * from the registry via getTools() and lets the model choose among them.
 */
async function exampleWithCustomTools(): Promise<void> {
  console.log('\n=== Example with Custom Tools ===');

  const client = new HelpingAI({ apiKey: 'your-api-key' });
  try {
    // Get tools from registry (includes our @tools decorated functions)
    const registeredTools = getTools();

    const completion = await client.chat.completions.create({
      model: 'Dhanishtha-2.0-preview',
      messages: [
        {
          role: 'user',
          content: "What's the weather in Paris and calculate tip for a $50 bill?",
        },
      ],
      tools: registeredTools,
      tool_choice: 'auto',
    });

    if ('choices' in completion) {
      console.log('Response:', completion.choices[0].message.content);
    }
  } catch (error) {
    console.error('Error with custom tools:', error);
  } finally {
    await client.cleanup();
  }
}
/**
 * Example of direct tool execution.
 *
 * Invokes registered tools through client.call() without routing through a
 * chat completion — useful for testing tools in isolation.
 */
async function exampleDirectToolExecution(): Promise<void> {
  console.log('\n=== Example: Direct Tool Execution ===');

  const client = new HelpingAI({ apiKey: 'your-api-key' });
  try {
    // Compute a 20% tip on a $50 bill directly.
    const tip = await client.call('calculateTip', {
      billAmount: 50,
      tipPercentage: 20,
    });
    console.log('Tip calculation result:', tip);

    // Fetch (mock) weather for London directly.
    const weather = await client.call('getWeather', {
      city: 'London',
      units: 'celsius',
    });
    console.log('Weather result:', weather);
  } catch (error) {
    console.error('Error with direct tool execution:', error);
  } finally {
    await client.cleanup();
  }
}
/**
 * Entry point: runs the basic tool-calling example followed by the custom
 * tools and direct-execution demonstrations.
 */
async function main(): Promise<void> {
  console.log('=== HelpingAI Tool Calling Examples ===\n');

  // Example usage from Python version
  const userQuery =
    'https://huggingface.co/CharacterEcho/Rohit-Sharma tell me about downloads of this model';

  try {
    console.log('1. Basic Tool Calling Example:');
    const answer = await generate(userQuery, true);
    console.log('\nFinal Response:');
    console.log('-'.repeat(50));
    console.log(answer);

    // Additional examples
    await exampleWithCustomTools();
    await exampleDirectToolExecution();
  } catch (error) {
    console.error('Error in main:', error);
  }
}
// Run the example if this file is executed directly.
// Note: This check works in Node.js environments. The ambient declarations
// below let the CommonJS globals type-check in an ES-module compilation
// without pulling in @types/node.
declare const require: any;
declare const module: any;
// require.main === module is the standard CJS "executed directly" test;
// the typeof guards keep this safe in environments where the globals are absent.
if (typeof require !== 'undefined' && typeof module !== 'undefined' && require.main === module) {
main().catch(console.error);
}
// Export functions for use in other modules
export { generate, calculateTip, getWeather, exampleWithCustomTools, exampleDirectToolExecution };