Files
2026-03-03 23:49:13 +01:00

202 lines
7.1 KiB
JavaScript

"use strict";
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAICompatible = void 0;
const fetchWithTimeout_1 = require("../fetchWithTimeout");
const types_1 = require("../types");
/**
 * Completion provider for any backend that implements the OpenAI
 * chat-completions wire protocol (api.openai.com by default, or a
 * compatible endpoint supplied via options.apiEndpoint).
 */
class OpenAICompatible {
    // Provider identifier reported to callers.
    name = 'openai-compatible';
    // Single completion round-trip; delegates to the module-level complete() below.
    async complete(conversation, options) {
        return complete(conversation, options);
    }
}
exports.OpenAICompatible = OpenAICompatible;
/**
 * Runs one chat-completions round-trip: converts the generic conversation to
 * OpenAI wire format, issues the request, and converts the reply back.
 *
 * @param conversation - Generic conversation (systemPrompt, messages, tools).
 * @param options - Model/transport options (model, maxTokens, temperature, reasoning, ...).
 * @returns {{ result: object, usage: { input: number, output: number } }}
 */
async function complete(conversation, options) {
    // Assemble the payload: system message first, then the converted history.
    const messages = [
        { role: 'system', content: systemPrompt(conversation.systemPrompt) },
        ...conversation.messages.flatMap(toCompletionsMessages),
    ];
    const tools = conversation.tools.map((tool) => toCompletionsTool(tool));
    const { response, error } = await create({
        model: options.model,
        max_completion_tokens: options.maxTokens,
        temperature: options.temperature,
        messages,
        tools,
        tool_choice: conversation.tools.length > 0 ? 'auto' : undefined,
        reasoning_effort: toCompletionsReasoning(options.reasoning),
        parallel_tool_calls: false,
    }, options);
    if (error || !response)
        return { result: (0, types_1.assistantMessageFromError)(error?.message ?? 'No response from OpenAI compatible API'), usage: (0, types_1.emptyUsage)() };
    // Collect text and function tool-calls across every returned choice.
    const content = [];
    for (const choice of response.choices) {
        const { message } = choice;
        if (message.content)
            content.push({ type: 'text', text: message.content });
        for (const call of message.tool_calls || []) {
            if (call.type === 'function')
                content.push(toToolCall(call));
        }
    }
    // Only the first choice's finish_reason decides the stop reason.
    const truncated = response.choices[0]?.finish_reason === 'length';
    return {
        result: {
            role: 'assistant',
            content,
            stopReason: truncated ? { code: 'max_tokens' } : { code: 'ok' },
        },
        usage: {
            input: response.usage?.prompt_tokens ?? 0,
            output: response.usage?.completion_tokens ?? 0,
        },
    };
}
/**
 * POSTs a chat-completions request and returns either the parsed response
 * body or a structured error. Exactly one of the returned fields is set.
 *
 * @param createParams - OpenAI chat-completions request payload.
 * @param options - Transport options (apiKey, apiEndpoint, apiTimeout, signal, debug).
 * @returns {Promise<{ response?: object, error?: object }>}
 */
async function create(createParams, options) {
    const headers = {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${options.apiKey}`,
    };
    // Avoid dumping full tool schemas into the debug log.
    const debugBody = { ...createParams, tools: `${createParams.tools?.length ?? 0} tools` };
    options.debug?.('lowire:openai')('Request:', JSON.stringify(debugBody, null, 2));
    const response = await (0, fetchWithTimeout_1.fetchWithTimeout)(options.apiEndpoint ?? `https://api.openai.com/v1/chat/completions`, {
        method: 'POST',
        headers,
        body: JSON.stringify(createParams),
        signal: options.signal,
        timeout: options.apiTimeout
    });
    const responseText = await response.text();
    let responseBody;
    try {
        // BUG FIX: parsing used to happen before the try/catch, so a non-JSON
        // error payload (e.g. an HTML page from a gateway/proxy) threw instead
        // of reaching the fallback below; the old catch was unreachable.
        responseBody = JSON.parse(responseText);
    }
    catch {
        return { error: { type: 'unknown', message: responseText } };
    }
    if (!response.ok) {
        // OpenAI wraps failures as { error: { message, type, ... } }; unwrap so
        // callers reading error.message see the real text. Fall back to the
        // whole body for servers that use a flat error shape.
        return { error: responseBody?.error ?? responseBody };
    }
    options.debug?.('lowire:openai')('Response:', JSON.stringify(responseBody, null, 2));
    return { response: responseBody };
}
/**
 * Converts a generic tool-result content part into an OpenAI chat message
 * content part ('text' or 'image_url').
 *
 * @param part - Generic part with type 'text' or 'image'.
 * @throws {Error} For any other part type.
 */
function toCopilotResultContentPart(part) {
    switch (part.type) {
        case 'text':
            return { type: 'text', text: part.text };
        case 'image':
            // Images travel inline as a base64 data URL.
            return {
                type: 'image_url',
                image_url: { url: `data:${part.mimeType};base64,${part.data}` },
            };
        default:
            throw new Error(`Cannot convert content part of type ${part.type} to text content part`);
    }
}
/**
 * Expands one generic conversation message into OpenAI chat messages.
 * A user message maps 1:1; an assistant message may fan out into the
 * assistant turn plus trailing 'tool' result messages (and a synthetic
 * user message carrying a tool error, if any).
 *
 * @throws {Error} For roles other than 'user' or 'assistant'.
 */
function toCompletionsMessages(message) {
    // User turns pass through unchanged.
    if (message.role === 'user')
        return [{ role: 'user', content: message.content }];
    if (message.role !== 'assistant')
        throw new Error(`Unsupported message role: ${message.role}`);
    const texts = message.content.filter((part) => part.type === 'text');
    const calls = message.content.filter((part) => part.type === 'tool_call');
    const assistantMessage = {
        role: 'assistant',
        // A single text part collapses to a plain string; otherwise keep the list.
        content: texts.length === 1 ? texts[0].text : texts,
    };
    const toolCalls = calls.map((call) => ({
        id: call.id,
        type: 'function',
        function: {
            name: call.name,
            arguments: JSON.stringify(call.arguments),
        },
    }));
    // Tool results become 'tool' messages that must follow the assistant turn.
    const followUps = [];
    for (const call of calls) {
        if (!call.result)
            continue;
        followUps.push({
            role: 'tool',
            tool_call_id: call.id,
            content: call.result.content.map(toCopilotResultContentPart),
        });
    }
    if (toolCalls.length > 0)
        assistantMessage.tool_calls = toolCalls;
    if (message.toolError) {
        followUps.push({
            role: 'user',
            content: [{
                    type: 'text',
                    text: message.toolError,
                }]
        });
    }
    return [assistantMessage, ...followUps];
}
/**
 * Converts a generic tool definition into the OpenAI function-tool shape.
 *
 * @param tool - Generic tool with name, description, and a JSON-schema inputSchema.
 */
function toCompletionsTool(tool) {
    const { name, description, inputSchema } = tool;
    return {
        type: 'function',
        function: { name, description, parameters: inputSchema },
    };
}
/**
 * Converts an OpenAI tool_call entry ('function' or 'custom') into the
 * generic tool_call content part, parsing the JSON-encoded arguments.
 */
function toToolCall(entry) {
    const isFunction = entry.type === 'function';
    const name = isFunction ? entry.function.name : entry.custom.name;
    const rawArguments = isFunction ? entry.function.arguments : entry.custom.input;
    return {
        type: 'tool_call',
        name,
        arguments: JSON.parse(rawArguments),
        id: entry.id,
    };
}
/**
 * Maps the generic reasoning option to the API's reasoning_effort value.
 * Unrecognized values yield undefined so the field is omitted from the request.
 */
function toCompletionsReasoning(reasoning) {
    const supported = new Set(['none', 'medium', 'high']);
    return supported.has(reasoning) ? reasoning : undefined;
}
/**
 * Wraps the caller-supplied system prompt with standing instructions that
 * push the model to include a tool call in every reply.
 * @param {string} prompt - The conversation's own system prompt text.
 * @returns {string} The combined system message content.
 */
const systemPrompt = (prompt) => `
### System instructions
${prompt}
### Tool calling instructions
- Make sure every message contains a tool call.
- When you use a tool, you may provide a brief thought or explanation in the content field
immediately before the tool_call. Do not split this into separate messages.
- Every reply must include a tool call.
`;