// AI API client: per-role configuration builders and a shared request helper.
/** Connection settings for one AI API role (selector or analyzer). */
interface AIConfig {
  /** Model identifier sent in the request body (e.g. a Claude model name). */
  model: string;
  /** Full URL of the messages endpoint, including the trailing /v1/messages path. */
  endpoint: string;
  /** API key sent as a Bearer token in the Authorization header. */
  apiKey: string;
}
|
|
|
|
function getSelectorConfig(): AIConfig {
|
|
return {
|
|
model: process.env.AI_SELECTOR_MODEL || process.env.AI_MODEL || 'claude-sonnet-4-20250514',
|
|
endpoint: (process.env.AI_SELECTOR_ENDPOINT || process.env.AI_API_ENDPOINT!) + '/v1/messages',
|
|
apiKey: process.env.AI_SELECTOR_API_KEY || process.env.AI_API_KEY!
|
|
};
|
|
}
|
|
|
|
function getAnalyzerConfig(): AIConfig {
|
|
return {
|
|
model: process.env.AI_ANALYZER_MODEL || process.env.AI_MODEL || 'claude-sonnet-4-20250514',
|
|
endpoint: (process.env.AI_ANALYZER_ENDPOINT || process.env.AI_API_ENDPOINT!) + '/v1/messages',
|
|
apiKey: process.env.AI_ANALYZER_API_KEY || process.env.AI_API_KEY!
|
|
};
|
|
}
|
|
|
|
export async function callAI(config: AIConfig, messages: any[], maxTokens: number = 1000, temperature: number = 0.3) {
|
|
const requestBody = {
|
|
model: config.model,
|
|
max_tokens: maxTokens,
|
|
temperature,
|
|
messages
|
|
};
|
|
|
|
console.log(`[AI API] Calling ${config.model} with ${messages.length} messages, max_tokens: ${maxTokens}`);
|
|
|
|
try {
|
|
const response = await fetch(config.endpoint, {
|
|
method: "POST",
|
|
headers: {
|
|
"Content-Type": "application/json",
|
|
"Authorization": `Bearer ${config.apiKey}`
|
|
},
|
|
body: JSON.stringify(requestBody)
|
|
});
|
|
|
|
if (!response.ok) {
|
|
let errorDetails = `${response.status} ${response.statusText}`;
|
|
try {
|
|
const errorBody = await response.text();
|
|
console.error(`[AI API] Error response body:`, errorBody);
|
|
errorDetails += ` - ${errorBody}`;
|
|
} catch {
|
|
console.error(`[AI API] Could not read error response body`);
|
|
}
|
|
|
|
console.error(`[AI API] Request failed:`, {
|
|
endpoint: config.endpoint,
|
|
model: config.model,
|
|
messageCount: messages.length,
|
|
maxTokens,
|
|
status: response.status,
|
|
hasApiKey: !!config.apiKey
|
|
});
|
|
|
|
throw new Error(`AI API error: ${response.status} - ${response.statusText}`);
|
|
}
|
|
|
|
const result = await response.json();
|
|
|
|
// Handle different API response formats
|
|
let content: string | null = null;
|
|
|
|
// Mistral API format
|
|
if (result.content && Array.isArray(result.content) && result.content[0]?.text) {
|
|
content = result.content[0].text;
|
|
console.log(`[AI API] Success (Mistral format) - Response length: ${content.length} chars`);
|
|
return {
|
|
choices: [{
|
|
message: {
|
|
content: content
|
|
}
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Anthropic/OpenAI format
|
|
if (result.choices && result.choices[0] && result.choices[0].message) {
|
|
content = result.choices[0].message.content;
|
|
console.log(`[AI API] Success (OpenAI/Anthropic format) - Response length: ${content?.length || 0} chars`);
|
|
return result;
|
|
}
|
|
|
|
// If neither format matches, log the structure and fail
|
|
console.error(`[AI API] Unexpected response structure:`, JSON.stringify(result, null, 2));
|
|
throw new Error('Unexpected AI API response structure');
|
|
|
|
} catch (error) {
|
|
if (error.message?.includes('AI API error:')) {
|
|
// Re-throw API errors as-is
|
|
throw error;
|
|
} else {
|
|
// Network or other errors
|
|
console.error(`[AI API] Network/request error:`, error);
|
|
throw new Error(`AI API request failed: ${error.message}`);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Expose the per-role config builders so callers can pair them with callAI().
export { getSelectorConfig, getAnalyzerConfig };