mono/packages/kbot/src/client.ts
2025-02-20 19:44:08 +01:00

111 lines
3.7 KiB
TypeScript

import OpenAI from 'openai'
import { logger } from './index.js'
import { loadConfig } from './config.js'
import { IKBotOptions } from './zod_types.js'
/**
 * The set of LLM routers this client knows how to talk to. Every router
 * exposes an OpenAI-compatible HTTP API.
 */
type RouterType = 'openrouter' | 'openai' | 'deepseek' | 'huggingface' | 'ollama' | 'fireworks' | 'gemini' | 'xai'

/**
 * Default API base URL per router. An entry may be overridden at call time
 * via `options.baseURL`.
 *
 * `satisfies` keeps the literal value types while still checking that every
 * RouterType has exactly one entry.
 */
const ROUTER_BASE_URLS = {
  openrouter: 'https://openrouter.ai/api/v1',
  // Empty string: let the OpenAI SDK fall back to its own default endpoint.
  openai: '',
  deepseek: 'https://api.deepseek.com',
  huggingface: 'https://api-inference.huggingface.co',
  // Default local Ollama API endpoint (no auth required locally).
  ollama: 'http://localhost:11434',
  // NOTE(review): Fireworks' OpenAI-compatible endpoint is usually
  // 'https://api.fireworks.ai/inference/v1' — confirm this path works.
  fireworks: 'https://api.fireworks.ai/v1',
  // NOTE(review): Gemini's OpenAI-compatible endpoint is usually
  // '.../v1beta/openai/' — confirm this base URL works with the OpenAI SDK.
  gemini: 'https://generativelanguage.googleapis.com/v1beta',
  // xAI (Grok) API base URL.
  xai: 'https://api.x.ai/v1',
} satisfies Record<RouterType, string>
/**
 * Model identifier used for each router when the caller does not supply
 * `options.model`. Checked against RouterType so adding a router without a
 * default model is a compile error.
 */
const DEFAULT_MODELS = {
  openrouter: 'anthropic/claude-3.5-sonnet',
  openai: 'gpt-4o',
  deepseek: 'deepseek-chat',
  // NOTE(review): 'meta-llama/2' is not a standard HF model id (expected
  // something like 'meta-llama/Llama-2-7b-chat-hf') — confirm against usage.
  huggingface: 'meta-llama/2',
  // Default local Ollama model.
  ollama: 'leonard',
  // NOTE(review): Fireworks ids usually look like
  // 'accounts/fireworks/models/...' — confirm this short form resolves.
  fireworks: 'llama-v2-70b-chat',
  gemini: 'gemini-1.5-pro',
  // Default xAI (Grok) model.
  xai: 'grok-1',
} satisfies Record<RouterType, string>
/*
* Creates an OpenAI client instance based on the provided options.
* @param options - Configuration options for the client
* @returns OpenAI client instance or undefined if configuration is invalid
*/
export const createClient = (options: IKBotOptions) => {
// Load configuration from file
const config = loadConfig(options);
if (!config) {
logger.error(
"Config not found in $HOME/.osr/config.json. " +
"Optionally, export OSR_CONFIG with the path to the configuration file."
);
return undefined;
}
// Determine router to use (defaults to 'openrouter')
const router: RouterType = (options.router ?? 'openrouter') as RouterType;
// Initialize API key
let apiKey = options.api_key;
// Set API key based on router if not provided in options
if (!apiKey) {
switch (router) {
case 'openrouter':
apiKey = config?.openrouter?.key;
break;
case 'openai':
apiKey = config?.openai?.key;
break;
case 'deepseek':
apiKey = config?.deepseek?.key;
break;
case 'huggingface':
apiKey = config?.huggingface?.key;
break;
case 'ollama':
// Ollama doesn't require an API key when running locally
apiKey = 'ollama'; // Dummy key for Ollama
break;
case 'fireworks':
apiKey = config?.fireworks?.key;
break;
case 'gemini':
apiKey = config?.gemini?.key;
break;
case 'xai':
apiKey = config?.xai?.key;
break;
}
}
// Validate API key
if (!apiKey ) {
logger.error(`No ${router} key found. Please provide an "api_key", set it in the config, or pass it via JSON config.`);
return undefined;
}
// Set default baseURL if not provided
const baseURL = options.baseURL ?? ROUTER_BASE_URLS[router]
// Set default model if not provided
if (!options.model) {
options.model = DEFAULT_MODELS[router]
}
logger.info(`Creating client with ${router} router, model ${options.model}, and API key ${apiKey} at ${baseURL}`)
// Create and return the OpenAI client instance
return new OpenAI({
apiKey,
baseURL,
})
}