Wrapping the Client
Copy
import OpenAI from 'openai';
import { Muxx } from 'muxx';

// Create the tracer, then wrap the SDK client: every call made through
// the wrapped client is traced automatically.
const muxx = new Muxx();
const client = muxx.wrap(new OpenAI());

// Use the wrapped client exactly like the plain OpenAI client.
const response = await client.chat.completions.create({
  model: 'gpt-4o',
  messages: [
    { role: 'user', content: 'Hello!' },
  ],
});
Supported Methods
| Method | Traced |
|---|---|
| `chat.completions.create` | Yes |
| `completions.create` | Yes |
| `embeddings.create` | Yes |
| `images.generate` | Yes |
Streaming
Copy
// Streaming responses are traced too — just pass `stream: true` and
// consume the returned async iterator as usual.
const stream = await client.chat.completions.create({
  model: 'gpt-4o',
  messages: [{ role: 'user', content: 'Write a story' }],
  stream: true,
});

for await (const chunk of stream) {
  // Each chunk carries an incremental delta; only some chunks have text.
  const delta = chunk.choices[0]?.delta;
  if (delta?.content) {
    process.stdout.write(delta.content);
  }
}
Function Calling
Copy
// Declare the tool (function) definitions the model may call.
const tools: OpenAI.ChatCompletionTool[] = [
  {
    type: 'function',
    function: {
      name: 'get_weather',
      description: 'Get weather for a location',
      // JSON Schema describing the tool's arguments.
      parameters: {
        type: 'object',
        properties: { location: { type: 'string' } },
        required: ['location'],
      },
    },
  },
];

// Pass the tools alongside the request as with the plain SDK.
const response = await client.chat.completions.create({
  model: 'gpt-4o',
  messages: [{ role: 'user', content: "What's the weather in Paris?" }],
  tools,
});
// Tool calls are logged
With Traces
Copy
/**
 * Handles a customer-support message inside a single trace.
 *
 * The trace contains two spans: one that classifies the message's
 * intent and one that generates the reply. The classified intent is
 * fed into the second call so the reply can be tailored to it (the
 * original example computed the intent but never used it).
 *
 * @param message - Raw user message to handle.
 * @returns The assistant's reply, or '' when the model returns none.
 */
async function handleSupport(message: string): Promise<string> {
  return muxx.trace('customer-support', async () => {
    // Span 1: classify the message into a coarse intent bucket.
    const classification = await muxx.span('classify-intent', async () => {
      return client.chat.completions.create({
        model: 'gpt-4o-mini',
        messages: [
          { role: 'system', content: 'Classify: billing, technical, general' },
          { role: 'user', content: message },
        ],
      });
    });
    // The span resolves to the full completion; extract the intent text.
    const intent = classification.choices[0]?.message.content ?? 'general';

    // Span 2: generate the actual reply, conditioned on the intent.
    const response = await muxx.span('generate-response', async () => {
      return client.chat.completions.create({
        model: 'gpt-4o',
        messages: [
          { role: 'system', content: `The user's intent is: ${intent}` },
          { role: 'user', content: message },
        ],
      });
    });
    // Guarded index access: choices may be empty.
    return response.choices[0]?.message.content ?? '';
  });
}
Embeddings
Copy
// Embedding requests are traced as well; token usage and latency
// are logged automatically.
const inputs = ['Hello world', 'Goodbye world'];
const embeddings = await client.embeddings.create({
  model: 'text-embedding-3-small',
  input: inputs,
});