diff --git a/packages/kbot/dist-in/commands/build.js b/packages/kbot/dist-in/commands/build.js index 277d1d1e..fedf1a38 100644 --- a/packages/kbot/dist-in/commands/build.js +++ b/packages/kbot/dist-in/commands/build.js @@ -1,4 +1,5 @@ import path from 'node:path'; +import { fileURLToPath } from 'node:url'; import { CONFIG_DEFAULT } from '@polymech/commons'; import { sync as read } from '@polymech/fs/read'; import { sync as write } from '@polymech/fs/write'; @@ -6,6 +7,43 @@ import { sync as exists } from '@polymech/fs/exists'; import { logger } from '../index.js'; import { fetchOpenAIModels } from '../models/openai.js'; import { fetchOpenRouterModels } from '../models/openrouter.js'; +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const generateModelEnum = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL`; + const enumContent = `export enum ${enumName} { +${models.map(model => ` MODEL_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; +const generateModelEnumFree = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL_FREE`; + const freeModels = models.filter(model => { + if (!model.pricing) + return false; + const pricing = model.pricing; + return ((pricing.prompt === 0 || pricing.prompt === "0") && + (pricing.completion === 0 || pricing.completion === "0") && + (pricing.image === 0 || pricing.image === "0")); + }); + const enumContent = `export enum ${enumName} { +${freeModels.map(model => ` MODEL_FREE_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; +const generateModelEnumTools = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL`; + const toolModels = models.filter(model => { + if (!model.top_provider) + return false; + return model.top_provider.supports_functions === true || + model.top_provider.supports_function_calling === true || + model.top_provider.supports_tools === true; + }); + const enumContent = `export enum ${enumName} { +${toolModels.map(model => ` MODEL_TOOLS_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; export const build = async () => { const examplesSrc = path.resolve(__dirname, '../docs_/examples.md'); if (exists(examplesSrc)) { @@ -25,12 +63,28 @@ export const build = async () => { name: model.name, pricing: model.pricing, context: model.context, - created: model.created + created: model.created, + top_provider: model.top_provider }; }); + // Generate model enums + const openAIEnumPath = path.resolve(__dirname, '../../src/models/cache/openai-models.ts'); + const openRouterEnumPath = path.resolve(__dirname, '../../src/models/cache/openrouter-models.ts'); + const openAIEnumFreePath = path.resolve(__dirname, '../../src/models/cache/openai-models-free.ts'); + const openRouterEnumFreePath = path.resolve(__dirname, '../../src/models/cache/openrouter-models-free.ts'); + const openAIEnumToolsPath = path.resolve(__dirname, '../../src/models/cache/openai-models-tools.ts'); + const openRouterEnumToolsPath = path.resolve(__dirname, '../../src/models/cache/openrouter-models-tools.ts'); + write(openAIEnumPath, generateModelEnum(modelsOpenAI, 'OpenAI')); + write(openRouterEnumPath, generateModelEnum(modelsOpenRouter, 'OpenRouter')); + write(openAIEnumFreePath, generateModelEnumFree(modelsOpenAI, 'OpenAI')); + write(openRouterEnumFreePath, generateModelEnumFree(modelsOpenRouter, 
'OpenRouter')); + write(openAIEnumToolsPath, generateModelEnumTools(modelsOpenAI, 'OpenAI')); + write(openRouterEnumToolsPath, generateModelEnumTools(modelsOpenRouter, 'OpenRouter')); + logger.info('Model enums generated'); + // Write model data const modelsOpenAIPath = path.resolve(__dirname, '../src/models/cache/openai.ts'); write(modelsOpenAIPath, `export const models = ${JSON.stringify(modelsOpenAI)}`); const modelsOpenRouterPath = path.resolve(__dirname, '../src/models/cache/openrouter.ts'); write(modelsOpenRouterPath, `export const models = ${JSON.stringify(modelsOpenRouter)}`); }; -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYnVpbGQuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvYnVpbGQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxJQUFJLE1BQU0sV0FBVyxDQUFBO0FBQzVCLE9BQU8sRUFBRSxjQUFjLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUVsRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFDbEQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUVwRCxPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0sYUFBYSxDQUFBO0FBQ3BDLE9BQU8sRUFBRSxpQkFBaUIsRUFBRSxNQUFNLHFCQUFxQixDQUFBO0FBQ3ZELE9BQU8sRUFBRSxxQkFBcUIsRUFBRSxNQUFNLHlCQUF5QixDQUFBO0FBRS9ELE1BQU0sQ0FBQyxNQUFNLEtBQUssR0FBRyxLQUFLLElBQUksRUFBRTtJQUM5QixNQUFNLFdBQVcsR0FBSSxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxzQkFBc0IsQ0FBQyxDQUFBO0lBQ3BFLElBQUcsTUFBTSxDQUFDLFdBQVcsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxRQUFRLEdBQUcsSUFBSSxDQUFDLFdBQVcsRUFBQyxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUE7UUFDakQsTUFBTSxZQUFZLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0NBQWtDLENBQUMsQ0FBQTtRQUNoRixLQUFLLENBQUMsWUFBWSxFQUFDLDJCQUEyQixJQUFJLENBQUMsU0FBUyxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUMsQ0FBQTtRQUN6RSxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixZQUFZLEVBQUUsQ0FBQyxDQUFBO0lBQzFELENBQUM7U0FBSSxDQUFDO1FBQ0osTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsV0FBVyxFQUFFLENBQUMsQ0FBQTtJQUN4RCxDQUFDO0lBRUQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFFdEMsTUFBTSxZQUFZLEdBQUksTUFBTSxpQkFBaUIsQ0FBQyxNQUFNLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFBO0lBQ2hFLE1BQU0sZ0JBQWdCLEdBQUcsQ0FBQyxNQUFNLHFCQUFxQixFQUFFLENBQUMsQ0FBQyxHQUFHLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRTtRQUNyRSxPQUFPO1lBQ0wsRUFBRSxFQUFFLEtBQUssQ0FBQyxFQUFFO1lBQ1osSUFBSSxFQUFFLEtBQUssQ0FBQyxJQUFJO1lBQ2hCLE9BQU8sRUFBRSxLQUFLLENBQUMsT0FBTztZQUN0QixPQUFPLEVBQUUsS0FBSyxDQUFDLE9BQU87WUFDdEIsT0FBTyxFQUFFLEtBQUssQ0FBQyxPQUFPO1NBQ3ZCLENBQUE7SUFDSCxDQUFDLENBQUMsQ0FBQTtJQUNGLE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsK0JBQStCLENBQUMsQ0FBQTtJQUNqRixLQUFLLENBQUMsZ0JBQWdCLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsWUFBWSxDQUFDLEVBQUUsQ0FBQyxDQUFBO0lBRS9FLE1BQU0sb0JBQW9CLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsbUNBQW1DLENBQUMsQ0FBQTtJQUN6RixLQUFLLENBQUMsb0JBQW9CLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDLENBQUE7QUFDekYsQ0FBQyxDQUFBIn0= \ No newline at end of file +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYnVpbGQuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvYnVpbGQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxJQUFJLE1BQU0sV0FBVyxDQUFBO0FBQzVCLE9BQU8sRUFBRSxhQUFhLEVBQUUsTUFBTSxVQUFVLENBQUE7QUFDeEMsT0FBTyxFQUFFLGNBQWMsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRWxELE9BQU8sRUFBRSxJQUFJLElBQUksSUFBSSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDaEQsT0FBTyxFQUFFLElBQUksSUFBSSxLQUFLLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNsRCxPQUFPLEVBQUUsSUFBSSxJQUFJLE1BQU0sRUFBRSxNQUFNLHFCQUFxQixDQUFBO0FBRXBELE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDcEMsT0FBTyxFQUFFLGlCQUFpQixFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDdkQsT0FBTyxFQUFFLHFCQUFxQixFQUFFLE1BQU0seUJBQXlCLENBQUE7QUFFL0QsTUFBTSxTQUFTLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxhQUFhLENBQUMsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFBO0FBRTlELE1BQU0saUJBQWlCLEdBQUcsQ0FBQyxNQUFhLEVBQUUsUUFBZ0IsRUFBRSxFQUFFO0lBQzVELE1BQU0sUUFBUSxHQUFHLEtBQUssUUFBUSxDQUFDLFdBQVcsRUFBRSxRQUFRLENBQUE7SUFDcEQsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLE1BQU0sQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxXQUFXLEtBQUssQ0FBQyxFQUFFLENBQUMsT0FBTyxDQUFDLGVBQWUsRUFBRSxHQUFHLENBQUMsQ0FBQyxXQUFXLEVBQUUsT0FBTyxLQUFLLENBQUMsRUFBRSxHQUFHLENBQUMsQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDO0VBQ2xILENBQUE7SUFDQSxPQUFPLFdBQVcsQ0FBQTtBQUNwQixDQUFDLENBQUE7QUFFRCxNQUFNLHFCQUFxQixHQUFHLENBQUMsTUFBYSxFQUFFLFFBQWdCLEVBQUUsRUFBRTtJQUNoRSxNQUFNLFFBQVEsR0FBRyxLQUFLLFFBQVEsQ0FBQyxXQUFXLEVBQUUsYUFBYSxDQUFBO0lBQ3pELE1BQU0sVUFBVSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLEVBQUU7UUFDdkMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPO1lBQUUsT0FBTyxLQUFLLENBQUE7UUFDaEMsTUFBTSxPQUFPLEdBQUcsS0FBSyxDQUFDLE9BQU8sQ0FBQTtRQUM3QixPQUFPLENBQ0wsQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUMsSUFBSSxPQUFPLENBQUMsTUFBTSxLQUFLLEdBQUcsQ0FBQztZQUNoRCxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQyxJQUFJLE9BQU8sQ0FBQyxVQUFVLEtBQUssR0FBRyxDQUFDO1lBQ3hELENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLElBQUksT0FBTyxDQUFDLEtBQUssS0FBSyxHQUFHLENBQUMsQ0FDL0MsQ0FBQTtJQUNILENBQUMsQ0FBQyxDQUFBO0lBQ0YsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLFVBQVUsQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxnQkFBZ0IsS0FBSyxDQUFDLEVBQUUsQ0FBQyxPQUFPLENBQUMsZUFBZSxFQUFFLEdBQUcsQ0FBQyxDQUFDLFdBQVcsRUFBRSxPQUFPLEtBQUssQ0FBQyxFQUFFLEdBQUcsQ0FBQyxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUM7RUFDM0gsQ0FBQTtJQUNBLE9BQU8sV0FBVyxDQUFBO0FBQ3BCLENBQUMsQ0FBQTtBQUVELE1BQU0sc0JBQXNCLEdBQUcsQ0FBQyxNQUFhLEVBQUUsUUFBZ0IsRUFBRSxFQUFFO0lBQ2pFLE1BQU0sUUFBUSxHQUFHLEtBQUssUUFBUSxDQUFDLFdBQVcsRUFBRSxRQUFRLENBQUE7SUFDcEQsTUFBTSxVQUFVLEdBQUcsTUFBTSxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsRUFBRTtRQUN2QyxJQUFJLENBQUMsS0FBSyxDQUFDLFlBQVk7WUFBRSxPQUFPLEtBQUssQ0FBQTtRQUNyQyxPQUFPLEtBQUssQ0FBQyxZQUFZLENBQUMsa0JBQWtCLEtBQUssSUFBSTtZQUM5QyxLQUFLLENBQUMsWUFBWSxDQUFDLHlCQUF5QixLQUFLLElBQUk7WUFDckQsS0FBSyxDQUFDLFlBQVksQ0FBQyxjQUFjLEtBQUssSUFBSSxDQUFBO0lBQ25ELENBQUMsQ0FBQyxDQUFBO0lBQ0YsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLFVBQVUsQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxpQkFBaUIsS0FBSyxDQUFDLEVBQUUsQ0FBQyxPQUFPLENBQUMsZUFBZSxFQUFFLEdBQUcsQ0FBQyxDQUFDLFdBQVcsRUFBRSxPQUFPLEtBQUssQ0FBQyxFQUFFLEdBQUcsQ0FBQyxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUM7RUFDNUgsQ0FBQTtJQUNBLE9BQU8sV0FBVyxDQUFBO0FBQ3BCLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEtBQUssR0FBRyxLQUFLLElBQUksRUFBRTtJQUM5QixNQUFNLFdBQVcsR0FBSSxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxzQkFBc0IsQ0FBQyxDQUFBO0lBQ3BFLElBQUcsTUFBTSxDQUFDLFdBQVcsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxRQUFRLEdBQUcsSUFBSSxDQUFDLFdBQVcsRUFBQyxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUE7UUFDakQsTUFBTSxZQUFZLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0NBQWtDLENBQ
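For reference, the generateModelEnum helper added in build.js above derives each enum member by replacing every non-alphanumeric character in the model id with an underscore and upper-casing the result. Under that rule, the generated src/models/cache/openrouter-models.ts should look roughly like the sketch below (members are shown only for ids that appear later in this diff; the real file contains one member per fetched model):

    // Approximate shape of the generated cache file, not its exact contents.
    export enum E_OPENROUTER_MODEL {
        MODEL_MISTRAL_MINISTRAL_8B = "mistral/ministral-8b",
        MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it",
        MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = "deepseek/deepseek-chat-v3-0324"
    }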
UMsQ0FBQTtRQUNoRixLQUFLLENBQUMsWUFBWSxFQUFDLDJCQUEyQixJQUFJLENBQUMsU0FBUyxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUMsQ0FBQTtRQUN6RSxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixZQUFZLEVBQUUsQ0FBQyxDQUFBO0lBQzFELENBQUM7U0FBSSxDQUFDO1FBQ0osTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsV0FBVyxFQUFFLENBQUMsQ0FBQTtJQUN4RCxDQUFDO0lBRUQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFFdEMsTUFBTSxZQUFZLEdBQUksTUFBTSxpQkFBaUIsQ0FBQyxNQUFNLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFBO0lBQ2hFLE1BQU0sZ0JBQWdCLEdBQUcsQ0FBQyxNQUFNLHFCQUFxQixFQUFFLENBQUMsQ0FBQyxHQUFHLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRTtRQUNyRSxPQUFPO1lBQ0wsRUFBRSxFQUFFLEtBQUssQ0FBQyxFQUFFO1lBQ1osSUFBSSxFQUFFLEtBQUssQ0FBQyxJQUFJO1lBQ2hCLE9BQU8sRUFBRSxLQUFLLENBQUMsT0FBTztZQUN0QixPQUFPLEVBQUUsS0FBSyxDQUFDLE9BQU87WUFDdEIsT0FBTyxFQUFFLEtBQUssQ0FBQyxPQUFPO1lBQ3RCLFlBQVksRUFBRSxLQUFLLENBQUMsWUFBWTtTQUNqQyxDQUFBO0lBQ0gsQ0FBQyxDQUFDLENBQUE7SUFDRix1QkFBdUI7SUFDdkIsTUFBTSxjQUFjLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUseUNBQXlDLENBQUMsQ0FBQTtJQUN6RixNQUFNLGtCQUFrQixHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxFQUFFLDZDQUE2QyxDQUFDLENBQUE7SUFDakcsTUFBTSxrQkFBa0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSw4Q0FBOEMsQ0FBQyxDQUFBO0lBQ2xHLE1BQU0sc0JBQXNCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0RBQWtELENBQUMsQ0FBQTtJQUMxRyxNQUFNLG1CQUFtQixHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxFQUFFLCtDQUErQyxDQUFDLENBQUE7SUFDcEcsTUFBTSx1QkFBdUIsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxtREFBbUQsQ0FBQyxDQUFBO0lBRTVHLEtBQUssQ0FBQyxjQUFjLEVBQUUsaUJBQWlCLENBQUMsWUFBWSxFQUFFLFFBQVEsQ0FBQyxDQUFDLENBQUE7SUFDaEUsS0FBSyxDQUFDLGtCQUFrQixFQUFFLGlCQUFpQixDQUFDLGdCQUFnQixFQUFFLFlBQVksQ0FBQyxDQUFDLENBQUE7SUFDNUUsS0FBSyxDQUFDLGtCQUFrQixFQUFFLHFCQUFxQixDQUFDLFlBQVksRUFBRSxRQUFRLENBQUMsQ0FBQyxDQUFBO0lBQ3hFLEtBQUssQ0FBQyxzQkFBc0IsRUFBRSxxQkFBcUIsQ0FBQyxnQkFBZ0IsRUFBRSxZQUFZLENBQUMsQ0FBQyxDQUFBO0lBQ3BGLEtBQUssQ0FBQyxtQkFBbUIsRUFBRSxzQkFBc0IsQ0FBQyxZQUFZLEVBQUUsUUFBUSxDQUFDLENBQUMsQ0FBQTtJQUMxRSxLQUFLLENBQUMsdUJBQXVCLEVBQUUsc0JBQXNCLENBQUMsZ0JBQWdCLEVBQUUsWUFBWSxDQUFDLENBQUMsQ0FBQTtJQUN0RixNQUFNLENBQUMsSUFBSSxDQUFDLHVCQUF1QixDQUFDLENBQUE7SUFFcEMsbUJBQW1CO0lBQ25CLE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsK0JBQStCLENBQUMsQ0FBQTtJQUNqRixLQUFLLENBQUMsZ0JBQWdCLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsWUFBWSxDQUFDLEVBQUUsQ0FBQyxDQUFBO0lBRS9FLE1BQU0sb0JBQW9CLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsbUNBQW1DLENBQUMsQ0FBQTtJQUN6RixLQUFLLENBQUMsb0JBQW9CLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDLENBQUE7QUFDekYsQ0FBQyxDQUFBIn0= \ No newline at end of file diff --git a/packages/kbot/dist-in/commands/run-tools.js b/packages/kbot/dist-in/commands/run-tools.js index fe3f5d97..9bd10157 100644 --- a/packages/kbot/dist-in/commands/run-tools.js +++ b/packages/kbot/dist-in/commands/run-tools.js @@ -59,4 +59,4 @@ export const runTools = async (client, params, options) => { const ret = content(result); return await onCompletion(ret, options); }; -//# 
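The free-model filter in generateModelEnumFree above has to cope with OpenRouter reporting pricing fields as decimal strings, which is why it compares against both the number 0 and the string "0". A minimal TypeScript sketch of that predicate, applied to pricing objects copied from the openrouter_models.json data further down (the Pricing type here is an assumption that only names the fields the filter reads):

    // Accepts prices reported either as numbers or as decimal strings.
    type Pricing = { prompt: number | string; completion: number | string; image: number | string };

    const isFree = (pricing?: Pricing): boolean =>
        !!pricing &&
        (pricing.prompt === 0 || pricing.prompt === '0') &&
        (pricing.completion === 0 || pricing.completion === '0') &&
        (pricing.image === 0 || pricing.image === '0');

    // Entries taken from the openrouter_models.json data below:
    const free = isFree({ prompt: '0', completion: '0', image: '0' });                 // true  -> ":free" variants
    const paid = isFree({ prompt: '0.0000001', completion: '0.0000001', image: '0' }); // false -> mistral/ministral-8b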
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLXRvb2xzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL2NvbW1hbmRzL3J1bi10b29scy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFNQSxPQUFPLEVBQUUsT0FBTyxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDN0MsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUNwQyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFbEQsTUFBTSxDQUFDLE1BQU0sUUFBUSxHQUFHLEtBQUssRUFBRSxNQUFjLEVBQUUsTUFBVyxFQUFFLE9BQWtCLEVBQUUsRUFBRTtJQUNoRixNQUFNLFNBQVMsR0FBRyxJQUFJLENBQUMsR0FBRyxFQUFFLENBQUMsUUFBUSxFQUFFLENBQUE7SUFDdkMsTUFBTSxlQUFlLEdBQUc7UUFDdEIsU0FBUztRQUNULE1BQU0sRUFBRSxPQUFPLENBQUMsTUFBTTtRQUN0QixTQUFTLEVBQUUsSUFBSSxJQUFJLEVBQUUsQ0FBQyxXQUFXLEVBQUU7UUFDbkMsUUFBUSxFQUFFLEVBQUU7S0FDYixDQUFBO0lBQ0QsSUFBSSxPQUFPLENBQUMsR0FBRyxFQUFFLENBQUM7UUFDaEIsTUFBTSxDQUFDLElBQUksQ0FBQyw2QkFBNkIsQ0FBQyxDQUFBO1FBQzFDLE9BQU87WUFDTCxNQUFNLEVBQUUsU0FBUztZQUNqQixlQUFlO1lBQ2YsVUFBVSxFQUFFLEVBQUU7WUFDZCxTQUFTLEVBQUUsRUFBRTtTQUNkLENBQUE7SUFDSCxDQUFDO0lBQ0QsTUFBTSxVQUFVLEdBQUcsQ0FBQyxPQUFZLEVBQUUsU0FBaUIsRUFBRSxNQUFNLEVBQUUsRUFBRTtRQUM3RCxPQUFPO1lBQ0wsR0FBRyxPQUFPO1lBQ1YsU0FBUyxFQUFFLElBQUksSUFBSSxFQUFFLENBQUMsV0FBVyxFQUFFO1lBQ25DLFNBQVM7WUFDVCxNQUFNO1NBQ1AsQ0FBQTtJQUNILENBQUMsQ0FBQTtJQUNELElBQUksTUFBTSxHQUFHLElBQUksQ0FBQTtJQUNqQixJQUFJLENBQUM7UUFDSCxNQUFNLEdBQUcsTUFBTSxNQUFNLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxXQUFXLENBQUMsUUFBUSxDQUFDLE1BQTZDLENBQUM7YUFDaEcsRUFBRSxDQUFDLFNBQVMsRUFBRSxDQUFDLE9BQW1DLEVBQUUsRUFBRTtZQUNyRCxPQUFPLENBQUMsU0FBUyxDQUFDLFNBQVMsQ0FBQyxVQUFVLENBQUMsT0FBTyxFQUFFLFNBQVMsRUFBRSxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtRQUM3RSxDQUFDLENBQUM7YUFDRCxFQUFFLENBQUMsY0FBYyxFQUFFLENBQUMsSUFBd0MsRUFBRSxFQUFFO1lBQy9ELE9BQU8sT0FBTyxDQUFDLFNBQVMsQ0FBQyxVQUFVLENBQUMsVUFBVSxDQUFDLElBQUksRUFBRSxTQUFTLEVBQUUsT0FBTyxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUE7UUFDbEYsQ0FBQyxDQUFDO2FBQ0QsRUFBRSxDQUFDLG9CQUFvQixFQUFFLENBQUMsQ0FBQyxFQUFFLEVBQUU7WUFDOUIsT0FBTyxDQUFDLFNBQVMsQ0FBQyxvQkFBb0IsQ0FBQyxDQUFDLENBQUMsQ0FBQTtRQUMzQyxDQUFDLENBQUM7YUFDRCxFQUFFLENBQUMsZ0JBQWdCLEVBQUUsT0FBTyxDQUFDLFNBQVMsQ0FBQyxnQkFBZ0IsQ0FBQzthQUN4RCxFQUFFLENBQUMsU0FBUyxFQUFFLE9BQU8sQ0FBQyxTQUFTLENBQUMsU0FBUyxDQUFDLENBQUE7SUFDL0MsQ0FBQztJQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7UUFDWCxNQUFNLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxDQUFBO0lBQ2pCLENBQUM7SUFFRCxJQUFJLE1BQU0sR0FBRyxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUE7SUFDNUIsSUFBSSxDQUFDO1FBQ0gsTUFBTSxHQUFHLE1BQU0sTUFBTSxDQUFDLG1CQUFtQixFQUFFLENBQUE7SUFDN0MsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDZixJQUFJLEtBQUssQ0FBQyxPQUFPLENBQUMsUUFBUSxDQUFDLGlCQUFpQixDQUFDLEVBQUUsQ0FBQztZQUM5QyxNQUFNLENBQUMsS0FBSyxDQUFDLHVDQUF1QyxFQUFFLEtBQUssQ0FBQyxPQUFPLEVBQUUsS0FBSyxDQUFDLE9BQU8sRUFBRSxLQUFLLENBQUMsQ0FBQTtZQUMxRixPQUFNO1FBQ1IsQ0FBQztRQUNELE1BQU0sQ0FBQyxLQUFLLENBQUMsNEJBQTRCLEVBQUUsS0FBSyxDQUFDLE9BQU8sRUFBRSxLQUFLLENBQUMsTUFBTSxDQUFDLENBQUE7UUFDdkUsT0FBTTtJQUNSLENBQUM7SUFDRCxNQUFNLEdBQUcsR0FBRyxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUE7SUFDM0IsT0FBTyxNQUFNLFlBQVksQ0FBQyxHQUFHLEVBQUUsT0FBTyxDQUFDLENBQUE7QUFDekMsQ0FBQyxDQUFBIn0= \ No newline at end of file +//# 
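Downstream code can then refer to models through the generated enums instead of hard-coded id strings. A hypothetical consumer of the cache files written by the build step above (the import specifier and the chosen member are illustrative assumptions, not part of this diff):

    // Assumes the generated openrouter-models-free.ts cache file is importable
    // from the consumer's location; adjust the relative path as needed.
    import { E_OPENROUTER_MODEL_FREE } from '../models/cache/openrouter-models-free.js';

    // Resolves to "mistralai/mistral-small-3.1-24b-instruct:free" without the raw string literal.
    const defaultFreeModel: string =
        E_OPENROUTER_MODEL_FREE.MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE;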
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLXRvb2xzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL2NvbW1hbmRzL3J1bi10b29scy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFNQSxPQUFPLEVBQUUsT0FBTyxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDN0MsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUNwQyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFbEQsTUFBTSxDQUFDLE1BQU0sUUFBUSxHQUFHLEtBQUssRUFBRSxNQUFjLEVBQUUsTUFBVyxFQUFFLE9BQWtCLEVBQUUsRUFBRTtJQUNoRixNQUFNLFNBQVMsR0FBRyxJQUFJLENBQUMsR0FBRyxFQUFFLENBQUMsUUFBUSxFQUFFLENBQUE7SUFDdkMsTUFBTSxlQUFlLEdBQUc7UUFDdEIsU0FBUztRQUNULE1BQU0sRUFBRSxPQUFPLENBQUMsTUFBTTtRQUN0QixTQUFTLEVBQUUsSUFBSSxJQUFJLEVBQUUsQ0FBQyxXQUFXLEVBQUU7UUFDbkMsUUFBUSxFQUFFLEVBQUU7S0FDYixDQUFBO0lBQ0QsSUFBSSxPQUFPLENBQUMsR0FBRyxFQUFFLENBQUM7UUFDaEIsTUFBTSxDQUFDLElBQUksQ0FBQyw2QkFBNkIsQ0FBQyxDQUFBO1FBQzFDLE9BQU87WUFDTCxNQUFNLEVBQUUsU0FBUztZQUNqQixlQUFlO1lBQ2YsVUFBVSxFQUFFLEVBQUU7WUFDZCxTQUFTLEVBQUUsRUFBRTtTQUNkLENBQUE7SUFDSCxDQUFDO0lBQ0QsTUFBTSxVQUFVLEdBQUcsQ0FBQyxPQUFZLEVBQUUsU0FBaUIsRUFBRSxNQUFNLEVBQUUsRUFBRTtRQUM3RCxPQUFPO1lBQ0wsR0FBRyxPQUFPO1lBQ1YsU0FBUyxFQUFFLElBQUksSUFBSSxFQUFFLENBQUMsV0FBVyxFQUFFO1lBQ25DLFNBQVM7WUFDVCxNQUFNO1NBQ1AsQ0FBQTtJQUNILENBQUMsQ0FBQTtJQUNELElBQUksTUFBTSxHQUFHLElBQUksQ0FBQTtJQUNqQixJQUFJLENBQUM7UUFDSCxNQUFNLEdBQUcsTUFBTSxNQUFNLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxXQUFXLENBQUMsUUFBUSxDQUFDLE1BQTZDLENBQUM7YUFDaEcsRUFBRSxDQUFDLFNBQVMsRUFBRSxDQUFDLE9BQW1DLEVBQUUsRUFBRTtZQUNwRCxPQUFPLENBQUMsU0FBd0IsQ0FBQyxTQUFTLENBQUMsVUFBVSxDQUFDLE9BQU8sRUFBRSxTQUFTLEVBQUUsT0FBTyxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUE7UUFDN0YsQ0FBQyxDQUFDO2FBQ0QsRUFBRSxDQUFDLGNBQWMsRUFBRSxDQUFDLElBQXdDLEVBQUUsRUFBRTtZQUMvRCxPQUFRLE9BQU8sQ0FBQyxTQUF3QixDQUFDLFVBQVUsQ0FBQyxVQUFVLENBQUMsSUFBSSxFQUFFLFNBQVMsRUFBRSxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtRQUNsRyxDQUFDLENBQUM7YUFDRCxFQUFFLENBQUMsb0JBQW9CLEVBQUUsQ0FBQyxDQUFDLEVBQUUsRUFBRTtZQUM3QixPQUFPLENBQUMsU0FBd0IsQ0FBQyxvQkFBb0IsQ0FBQyxDQUFDLENBQUMsQ0FBQTtRQUMzRCxDQUFDLENBQUM7YUFDRCxFQUFFLENBQUMsZ0JBQWdCLEVBQUcsT0FBTyxDQUFDLFNBQXdCLENBQUMsZ0JBQWdCLENBQUM7YUFDeEUsRUFBRSxDQUFDLFNBQVMsRUFBRyxPQUFPLENBQUMsU0FBd0IsQ0FBQyxTQUFTLENBQUMsQ0FBQTtJQUMvRCxDQUFDO0lBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQztRQUNYLE1BQU0sQ0FBQyxLQUFLLENBQUMsQ0FBQyxDQUFDLENBQUE7SUFDakIsQ0FBQztJQUVELElBQUksTUFBTSxHQUFHLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUM1QixJQUFJLENBQUM7UUFDSCxNQUFNLEdBQUcsTUFBTSxNQUFNLENBQUMsbUJBQW1CLEVBQUUsQ0FBQTtJQUM3QyxDQUFDO0lBQUMsT0FBTyxLQUFLLEVBQUUsQ0FBQztRQUNmLElBQUksS0FBSyxDQUFDLE9BQU8sQ0FBQyxRQUFRLENBQUMsaUJBQWlCLENBQUMsRUFBRSxDQUFDO1lBQzlDLE1BQU0sQ0FBQyxLQUFLLENBQUMsdUNBQXVDLEVBQUUsS0FBSyxDQUFDLE9BQU8sRUFBRSxLQUFLLENBQUMsT0FBTyxFQUFFLEtBQUssQ0FBQyxDQUFBO1lBQzFGLE9BQU07UUFDUixDQUFDO1FBQ0QsTUFBTSxDQUFDLEtBQUssQ0FBQyw0QkFBNEIsRUFBRSxLQUFLLENBQUMsT0FBTyxFQUFFLEtBQUssQ0FBQyxNQUFNLENBQUMsQ0FBQTtRQUN2RSxPQUFNO0lBQ1IsQ0FBQztJQUNELE1BQU0sR0FBRyxHQUFHLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMzQixPQUFPLE1BQU0sWUFBWSxDQUFDLEdBQUcsRUFBRSxPQUFPLENBQUMsQ0FBQTtBQUN6QyxDQUFDLENBQUEifQ== \ No newline at end of file diff --git a/packages/kbot/dist-in/data/openai_models.json b/packages/kbot/dist-in/data/openai_models.json index 09d928bc..c7cc30a1 100644 --- a/packages/kbot/dist-in/data/openai_models.json +++ b/packages/kbot/dist-in/data/openai_models.json @@ -1,10 +1,16 @@ { - "timestamp": 1742371204069, + "timestamp": 1743496844609, "models": [ { - "id": "gpt-4o-mini-audio-preview-2024-12-17", + "id": "gpt-4o-realtime-preview-2024-12-17", "object": "model", - "created": 1734115920, + "created": 1733945430, + "owned_by": "system" + }, + { + "id": 
"gpt-4o-audio-preview-2024-12-17", + "object": "model", + "created": 1734034239, "owned_by": "system" }, { @@ -26,33 +32,27 @@ "owned_by": "system" }, { - "id": "o1-mini-2024-09-12", + "id": "o3-mini", "object": "model", - "created": 1725648979, + "created": 1737146383, "owned_by": "system" }, { - "id": "omni-moderation-latest", + "id": "o3-mini-2025-01-31", "object": "model", - "created": 1731689265, + "created": 1738010200, "owned_by": "system" }, { - "id": "gpt-4o-mini-audio-preview", + "id": "gpt-4o-mini-realtime-preview-2024-12-17", "object": "model", - "created": 1734387424, + "created": 1734112601, "owned_by": "system" }, { - "id": "omni-moderation-2024-09-26", + "id": "gpt-4o-mini-realtime-preview", "object": "model", - "created": 1732734466, - "owned_by": "system" - }, - { - "id": "o1", - "object": "model", - "created": 1734375816, + "created": 1734387380, "owned_by": "system" }, { @@ -61,6 +61,24 @@ "created": 1727131766, "owned_by": "system" }, + { + "id": "gpt-4o-transcribe", + "object": "model", + "created": 1742068463, + "owned_by": "system" + }, + { + "id": "gpt-4o-mini-transcribe", + "object": "model", + "created": 1742068596, + "owned_by": "system" + }, + { + "id": "gpt-4o-realtime-preview", + "object": "model", + "created": 1727659998, + "owned_by": "system" + }, { "id": "babbage-002", "object": "model", @@ -68,9 +86,9 @@ "owned_by": "system" }, { - "id": "o1-mini", + "id": "gpt-4o-mini-tts", "object": "model", - "created": 1725649008, + "created": 1742403959, "owned_by": "system" }, { @@ -85,12 +103,6 @@ "created": 1705953180, "owned_by": "system" }, - { - "id": "gpt-4o-audio-preview-2024-12-17", - "object": "model", - "created": 1734034239, - "owned_by": "system" - }, { "id": "gpt-4", "object": "model", @@ -98,22 +110,10 @@ "owned_by": "openai" }, { - "id": "o3-mini-2025-01-31", + "id": "text-embedding-ada-002", "object": "model", - "created": 1738010200, - "owned_by": "system" - }, - { - "id": "o3-mini", - "object": "model", - "created": 1737146383, - "owned_by": "system" - }, - { - "id": "gpt-4o-2024-05-13", - "object": "model", - "created": 1715368132, - "owned_by": "system" + "created": 1671217299, + "owned_by": "openai-internal" }, { "id": "tts-1-hd", @@ -121,6 +121,18 @@ "created": 1699046015, "owned_by": "system" }, + { + "id": "gpt-4o-mini-audio-preview", + "object": "model", + "created": 1734387424, + "owned_by": "system" + }, + { + "id": "gpt-4o-audio-preview", + "object": "model", + "created": 1727460443, + "owned_by": "system" + }, { "id": "o1-preview-2024-09-12", "object": "model", @@ -145,48 +157,24 @@ "created": 1699053241, "owned_by": "system" }, - { - "id": "gpt-4o-audio-preview", - "object": "model", - "created": 1727460443, - "owned_by": "system" - }, { "id": "davinci-002", "object": "model", "created": 1692634301, "owned_by": "system" }, - { - "id": "gpt-4o-realtime-preview", - "object": "model", - "created": 1727659998, - "owned_by": "system" - }, { "id": "gpt-3.5-turbo-1106", "object": "model", "created": 1698959748, "owned_by": "system" }, - { - "id": "gpt-4o-search-preview", - "object": "model", - "created": 1741388720, - "owned_by": "system" - }, { "id": "gpt-4-turbo", "object": "model", "created": 1712361441, "owned_by": "system" }, - { - "id": "o1-2024-12-17", - "object": "model", - "created": 1734326976, - "owned_by": "system" - }, { "id": "gpt-3.5-turbo-instruct", "object": "model", @@ -199,6 +187,12 @@ "created": 1677610602, "owned_by": "openai" }, + { + "id": "chatgpt-4o-latest", + "object": "model", + "created": 1723515131, + "owned_by": 
"system" + }, { "id": "gpt-4o-mini-search-preview-2025-03-11", "object": "model", @@ -206,9 +200,9 @@ "owned_by": "system" }, { - "id": "chatgpt-4o-latest", + "id": "gpt-4o-2024-11-20", "object": "model", - "created": 1723515131, + "created": 1739331543, "owned_by": "system" }, { @@ -224,15 +218,9 @@ "owned_by": "system" }, { - "id": "gpt-4o-2024-08-06", + "id": "gpt-4o-2024-05-13", "object": "model", - "created": 1722814719, - "owned_by": "system" - }, - { - "id": "gpt-4-turbo-2024-04-09", - "object": "model", - "created": 1712601677, + "created": 1715368132, "owned_by": "system" }, { @@ -242,15 +230,9 @@ "owned_by": "openai-internal" }, { - "id": "gpt-4o", + "id": "gpt-4-turbo-2024-04-09", "object": "model", - "created": 1715367049, - "owned_by": "system" - }, - { - "id": "text-embedding-3-small", - "object": "model", - "created": 1705948997, + "created": 1712601677, "owned_by": "system" }, { @@ -259,12 +241,6 @@ "created": 1698957206, "owned_by": "system" }, - { - "id": "text-embedding-ada-002", - "object": "model", - "created": 1671217299, - "owned_by": "openai-internal" - }, { "id": "o1-preview", "object": "model", @@ -278,15 +254,15 @@ "owned_by": "openai" }, { - "id": "gpt-4.5-preview", + "id": "gpt-4o-search-preview", "object": "model", - "created": 1740623059, + "created": 1741388720, "owned_by": "system" }, { - "id": "gpt-4o-mini-realtime-preview", + "id": "gpt-4.5-preview", "object": "model", - "created": 1734387380, + "created": 1740623059, "owned_by": "system" }, { @@ -295,12 +271,6 @@ "created": 1740623304, "owned_by": "system" }, - { - "id": "gpt-4o-mini-realtime-preview-2024-12-17", - "object": "model", - "created": 1734112601, - "owned_by": "system" - }, { "id": "gpt-4o-search-preview-2025-03-11", "object": "model", @@ -308,15 +278,9 @@ "owned_by": "system" }, { - "id": "gpt-4o-mini-2024-07-18", + "id": "omni-moderation-latest", "object": "model", - "created": 1721172717, - "owned_by": "system" - }, - { - "id": "gpt-4o-mini", - "object": "model", - "created": 1721172741, + "created": 1731689265, "owned_by": "system" }, { @@ -325,12 +289,66 @@ "created": 1681940951, "owned_by": "openai-internal" }, + { + "id": "omni-moderation-2024-09-26", + "object": "model", + "created": 1732734466, + "owned_by": "system" + }, + { + "id": "text-embedding-3-small", + "object": "model", + "created": 1705948997, + "owned_by": "system" + }, + { + "id": "gpt-4o", + "object": "model", + "created": 1715367049, + "owned_by": "system" + }, + { + "id": "gpt-4o-mini", + "object": "model", + "created": 1721172741, + "owned_by": "system" + }, + { + "id": "gpt-4o-2024-08-06", + "object": "model", + "created": 1722814719, + "owned_by": "system" + }, + { + "id": "gpt-4o-mini-2024-07-18", + "object": "model", + "created": 1721172717, + "owned_by": "system" + }, { "id": "gpt-4-turbo-preview", "object": "model", "created": 1706037777, "owned_by": "system" }, + { + "id": "o1-mini", + "object": "model", + "created": 1725649008, + "owned_by": "system" + }, + { + "id": "gpt-4o-mini-audio-preview-2024-12-17", + "object": "model", + "created": 1734115920, + "owned_by": "system" + }, + { + "id": "o1-mini-2024-09-12", + "object": "model", + "created": 1725648979, + "owned_by": "system" + }, { "id": "gpt-4-0125-preview", "object": "model", @@ -338,15 +356,27 @@ "owned_by": "system" }, { - "id": "gpt-4o-2024-11-20", + "id": "o1", "object": "model", - "created": 1739331543, + "created": 1734375816, "owned_by": "system" }, { - "id": "gpt-4o-realtime-preview-2024-12-17", + "id": "o1-2024-12-17", "object": "model", - 
"created": 1733945430, + "created": 1734326976, + "owned_by": "system" + }, + { + "id": "o1-pro", + "object": "model", + "created": 1742251791, + "owned_by": "system" + }, + { + "id": "o1-pro-2025-03-19", + "object": "model", + "created": 1742251504, "owned_by": "system" } ] diff --git a/packages/kbot/dist-in/data/openrouter_models.json b/packages/kbot/dist-in/data/openrouter_models.json index cab2245a..5ad24218 100644 --- a/packages/kbot/dist-in/data/openrouter_models.json +++ b/packages/kbot/dist-in/data/openrouter_models.json @@ -1,29 +1,519 @@ { - "timestamp": 1742371204254, + "timestamp": 1743496844791, "models": [ + { + "id": "mistral/ministral-8b", + "name": "Mistral: Ministral 8b", + "created": 1743430021, + "description": "Ministral 8B is a state-of-the-art language model optimized for on-device and edge computing. Designed for efficiency in knowledge-intensive tasks, commonsense reasoning, and function-calling, it features a specialized interleaved sliding-window attention mechanism, enabling faster and more memory-efficient inference. Ministral 8B excels in local, low-latency applications such as offline translation, smart assistants, autonomous robotics, and local analytics.\n\nThe model supports up to 128k context length and can function as a performant intermediary in multi-step agentic workflows, efficiently handling tasks like input parsing, API calls, and task routing. It consistently outperforms comparable models like Mistral 7B across benchmarks, making it particularly suitable for compute-efficient, privacy-focused scenarios.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000001", + "completion": "0.0000001", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "deepseek/deepseek-v3-base:free", + "name": "DeepSeek: DeepSeek V3 Base (free)", + "created": 1743272023, + "description": "Note that this is a base model mostly meant for testing, you need to provide detailed prompts for the model to return useful responses. \n\nDeepSeek-V3 Base is a 671B parameter open Mixture-of-Experts (MoE) language model with 37B active parameters per forward pass and a context length of 128K tokens. Trained on 14.8T tokens using FP8 mixed precision, it achieves high training efficiency and stability, with strong performance across language, reasoning, math, and coding tasks. 
\n\nDeepSeek-V3 Base is the pre-trained model behind [DeepSeek V3](/deepseek/deepseek-chat-v3)", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "DeepSeek", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "scb10x/llama3.1-typhoon2-8b-instruct", + "name": "Typhoon2 8B Instruct", + "created": 1743196511, + "description": "Llama3.1-Typhoon2-8B-Instruct is a Thai-English instruction-tuned model with 8 billion parameters, built on Llama 3.1. It significantly improves over its base model in Thai reasoning, instruction-following, and function-calling tasks, while maintaining competitive English performance. The model is optimized for bilingual interaction and performs well on Thai-English code-switching, MT-Bench, IFEval, and tool-use benchmarks.\n\nDespite its smaller size, it demonstrates strong generalization across math, coding, and multilingual benchmarks, outperforming comparable 8B models across most Thai-specific tasks. Full benchmark results and methodology are available in the [technical report.](https://arxiv.org/abs/2412.13702)", + "context_length": 8192, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.00000018", + "completion": "0.00000018", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "scb10x/llama3.1-typhoon2-70b-instruct", + "name": "Typhoon2 70B Instruct", + "created": 1743196170, + "description": "Llama3.1-Typhoon2-70B-Instruct is a Thai-English instruction-tuned language model with 70 billion parameters, built on Llama 3.1. It demonstrates strong performance across general instruction-following, math, coding, and tool-use tasks, with state-of-the-art results in Thai-specific benchmarks such as IFEval, MT-Bench, and Thai-English code-switching.\n\nThe model excels in bilingual reasoning and function-calling scenarios, offering high accuracy across diverse domains. Comparative evaluations show consistent improvements over prior Thai LLMs and other Llama-based baselines. 
Full results and methodology are available in the [technical report.](https://arxiv.org/abs/2412.13702)", + "context_length": 8192, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.00000088", + "completion": "0.00000088", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "allenai/molmo-7b-d:free", + "name": "AllenAI: Molmo 7B D (free)", + "created": 1743023247, + "description": "Molmo is a family of open vision-language models developed by the Allen Institute for AI. Molmo models are trained on PixMo, a dataset of 1 million, highly-curated image-text pairs. It has state-of-the-art performance among multimodal models with a similar size while being fully open-source. You can find all models in the Molmo family [here](https://huggingface.co/collections/allenai/molmo-66f379e6fe3b8ef090a8ca19). Learn more about the Molmo family [in the announcement blog post](https://molmo.allenai.org/blog) or the [paper](https://huggingface.co/papers/2409.17146).\n\nMolmo 7B-D is based on [Qwen2-7B](https://huggingface.co/Qwen/Qwen2-7B) and uses [OpenAI CLIP](https://huggingface.co/openai/clip-vit-large-patch14-336) as vision backbone. It performs comfortably between GPT-4V and GPT-4o on both academic benchmarks and human evaluation.\n\nThis checkpoint is a preview of the Molmo release. All artifacts used in creating Molmo (PixMo dataset, training code, evaluations, intermediate checkpoints) will be made available at a later date, furthering our commitment to open-source AI development and reproducibility.", + "context_length": 4096, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 4096, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "bytedance-research/ui-tars-72b:free", + "name": "Bytedance: UI-TARS 72B (free)", + "created": 1743020065, + "description": "UI-TARS 72B is an open-source multimodal AI model designed specifically for automating browser and desktop tasks through visual interaction and control. The model is built with a specialized vision architecture enabling accurate interpretation and manipulation of on-screen visual data. It supports automation tasks within web browsers as well as desktop applications, including Microsoft Office and VS Code.\n\nCore capabilities include intelligent screen detection, predictive action modeling, and efficient handling of repetitive interactions. UI-TARS employs supervised fine-tuning (SFT) tailored explicitly for computer control scenarios. It can be deployed locally or accessed via Hugging Face for demonstration purposes. 
Intended use cases encompass workflow automation, task scripting, and interactive desktop control applications.", + "context_length": 32768, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "qwen/qwen2.5-vl-3b-instruct:free", + "name": "Qwen: Qwen2.5 VL 3B Instruct (free)", + "created": 1743014573, + "description": "Qwen2.5 VL 3B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", + "context_length": 64000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 64000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "google/gemini-2.5-pro-exp-03-25:free", + "name": "Google: Gemini Pro 2.5 Experimental (free)", + "created": 1742922099, + "description": "Gemini 2.5 Pro is Google’s state-of-the-art AI model designed for advanced reasoning, coding, mathematics, and scientific tasks. It employs “thinking” capabilities, enabling it to reason through responses with enhanced accuracy and nuanced context handling. 
Gemini 2.5 Pro achieves top-tier performance on multiple benchmarks, including first-place positioning on the LMArena leaderboard, reflecting superior human-preference alignment and complex problem-solving abilities.", + "context_length": 1000000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Gemini", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 1000000, + "max_completion_tokens": 65535, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "qwen/qwen2.5-vl-32b-instruct:free", + "name": "Qwen: Qwen2.5 VL 32B Instruct (free)", + "created": 1742839838, + "description": "Qwen2.5-VL-32B is a multimodal vision-language model fine-tuned through reinforcement learning for enhanced mathematical reasoning, structured outputs, and visual problem-solving capabilities. It excels at visual analysis tasks, including object recognition, textual interpretation within images, and precise event localization in extended videos. Qwen2.5-VL-32B demonstrates state-of-the-art performance across multimodal benchmarks such as MMMU, MathVista, and VideoMME, while maintaining strong reasoning and clarity in text-based tasks like MMLU, mathematical problem-solving, and code generation.", + "context_length": 8192, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "deepseek/deepseek-chat-v3-0324:free", + "name": "DeepSeek: DeepSeek V3 0324 (free)", + "created": 1742824755, + "description": "DeepSeek V3, a 685B-parameter, mixture-of-experts model, is the latest iteration of the flagship chat model family from the DeepSeek team.\n\nIt succeeds the [DeepSeek V3](/deepseek/deepseek-chat-v3) model and performs really well on a variety of tasks.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "DeepSeek", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "deepseek/deepseek-chat-v3-0324", + "name": "DeepSeek: DeepSeek V3 0324", + "created": 1742824755, + "description": "DeepSeek V3, a 685B-parameter, mixture-of-experts model, is the latest iteration of the flagship chat model family from the DeepSeek team.\n\nIt succeeds the [DeepSeek V3](/deepseek/deepseek-chat-v3) model and performs really well on a variety of tasks.", + "context_length": 64000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + 
"output_modalities": [ + "text" + ], + "tokenizer": "DeepSeek", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000027", + "completion": "0.0000011", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 64000, + "max_completion_tokens": 8192, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "featherless/qwerky-72b:free", + "name": "Qwerky 72b (free)", + "created": 1742481597, + "description": "Qwerky-72B is a linear-attention RWKV variant of the Qwen 2.5 72B model, optimized to significantly reduce computational cost at scale. Leveraging linear attention, it achieves substantial inference speedups (>1000x) while retaining competitive accuracy on common benchmarks like ARC, HellaSwag, Lambada, and MMLU. It inherits knowledge and language support from Qwen 2.5, supporting approximately 30 languages, making it suitable for efficient inference in large-context applications.", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "openai/o1-pro", + "name": "OpenAI: o1-pro", + "created": 1742423211, + "description": "The o1 series of models are trained with reinforcement learning to think before they answer and perform complex reasoning. The o1-pro model uses more compute to think harder and provide consistently better answers.", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00015", + "completion": "0.0006", + "request": "0", + "image": "0.21675", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 100000, + "is_moderated": true + }, + "per_request_limits": null + }, + { + "id": "mistralai/mistral-small-3.1-24b-instruct:free", + "name": "Mistral: Mistral Small 3.1 24B (free)", + "created": 1742238937, + "description": "Mistral Small 3.1 24B Instruct is an upgraded variant of Mistral Small 3 (2501), featuring 24 billion parameters with advanced multimodal capabilities. It provides state-of-the-art performance in text-based reasoning and vision tasks, including image analysis, programming, mathematical reasoning, and multilingual support across dozens of languages. 
Equipped with an extensive 128k token context window and optimized for efficient local inference, it supports use cases such as conversational agents, function calling, long-document comprehension, and privacy-sensitive deployments.", + "context_length": 96000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 96000, + "max_completion_tokens": 96000, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "mistralai/mistral-small-3.1-24b-instruct", "name": "Mistral: Mistral Small 3.1 24B", "created": 1742238937, "description": "Mistral Small 3.1 24B Instruct is an upgraded variant of Mistral Small 3 (2501), featuring 24 billion parameters with advanced multimodal capabilities. It provides state-of-the-art performance in text-based reasoning and vision tasks, including image analysis, programming, mathematical reasoning, and multilingual support across dozens of languages. Equipped with an extensive 128k token context window and optimized for efficient local inference, it supports use cases such as conversational agents, function calling, long-document comprehension, and privacy-sensitive deployments.", - "context_length": 128000, + "context_length": 32768, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000001", "completion": "0.0000003", - "image": "0.000926", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0009264", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -37,18 +527,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -65,18 +561,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -90,25 +592,31 @@ "name": "SteelSkull: L3.3 Electra R1 70B", "created": 1742067611, "description": "L3.3-Electra-R1-70 is the newest release of the Unnamed series. 
Built on a DeepSeek R1 Distill base, Electra-R1 integrates various models together to provide an intelligent and coherent model capable of providing deep character insights. Through proper prompting, the model demonstrates advanced reasoning capabilities and unprompted exploration of character inner thoughts and motivations. Read more about the model and [prompting here](https://huggingface.co/Steelskull/L3.3-Electra-R1-70b)", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.0000007", "completion": "0.0000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, - "max_completion_tokens": null, + "context_length": 131072, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -121,18 +629,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.0000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -146,24 +660,31 @@ "name": "Google: Gemma 3 1B (free)", "created": 1741963556, "description": "Gemma 3 1B is the smallest of the new Gemma 3 family. It handles context windows up to 32k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling. Note: Gemma 3 1B is not multimodal. 
For the smallest multimodal Gemma 3 model, please see [Gemma 3 4B](google/gemma-3-4b-it)", - "context_length": 32000, + "context_length": 32768, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, + "context_length": 32768, "max_completion_tokens": 8192, "is_moderated": false }, @@ -177,18 +698,25 @@ "context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -197,6 +725,41 @@ }, "per_request_limits": null }, + { + "id": "google/gemma-3-4b-it", + "name": "Google: Gemma 3 4B", + "created": 1741905510, + "description": "Gemma 3 introduces multimodality, supporting vision-language input and text outputs. It handles context windows up to 128k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling.", + "context_length": 131072, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Gemini", + "instruct_type": "gemma" + }, + "pricing": { + "prompt": "0.00000002", + "completion": "0.00000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "ai21/jamba-1.6-large", "name": "AI21: Jamba 1.6 Large", @@ -205,18 +768,24 @@ "context_length": 256000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 256000, @@ -233,18 +802,24 @@ "context_length": 256000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 256000, @@ -261,18 +836,25 @@ 
"context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -281,6 +863,41 @@ }, "per_request_limits": null }, + { + "id": "google/gemma-3-12b-it", + "name": "Google: Gemma 3 12B", + "created": 1741902625, + "description": "Gemma 3 introduces multimodality, supporting vision-language input and text outputs. It handles context windows up to 128k tokens, understands over 140 languages, and offers improved math, reasoning, and chat capabilities, including structured outputs and function calling. Gemma 3 12B is the second largest in the family of Gemma 3 models after [Gemma 3 27B](google/gemma-3-27b-it)", + "context_length": 131072, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Gemini", + "instruct_type": "gemma" + }, + "pricing": { + "prompt": "0.00000005", + "completion": "0.0000001", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "cohere/command-a", "name": "Cohere: Command A", @@ -289,18 +906,24 @@ "context_length": 256000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 256000, @@ -317,18 +940,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00000015", "completion": "0.0000006", - "image": "0.000217", "request": "0.0275", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.000217", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -345,18 +975,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0.003613", "request": "0.035", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.003613", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -373,18 +1010,24 @@ "context_length": 16384, "architecture": { "modality": 
"text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": null }, "pricing": { "prompt": "0.0000006", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -401,18 +1044,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -429,18 +1078,25 @@ "context_length": 96000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 96000, @@ -457,18 +1113,25 @@ "context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0.0000001", "completion": "0.0000002", - "image": "0.0000256", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0000256", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -482,25 +1145,31 @@ "name": "TheDrummer: Anubis Pro 105B V1", "created": 1741642290, "description": "Anubis Pro 105B v1 is an expanded and refined variant of Meta’s Llama 3.3 70B, featuring 50% additional layers and further fine-tuning to leverage its increased capacity. Designed for advanced narrative, roleplay, and instructional tasks, it demonstrates enhanced emotional intelligence, creativity, nuanced character portrayal, and superior prompt adherence compared to smaller models. 
Its larger parameter count allows for deeper contextual understanding and extended reasoning capabilities, optimized for engaging, intelligent, and coherent interactions.", - "context_length": 64000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000008", - "completion": "0.0000008", - "image": "0", + "completion": "0.000001", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 64000, - "max_completion_tokens": 64000, + "context_length": 131072, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -510,25 +1179,31 @@ "name": "LatitudeGames: Wayfarer Large 70B Llama 3.3", "created": 1741636885, "description": "Wayfarer Large 70B is a roleplay and text-adventure model fine-tuned from Meta’s Llama-3.3-70B-Instruct. Specifically optimized for narrative-driven, challenging scenarios, it introduces realistic stakes, conflicts, and consequences often avoided by standard RLHF-aligned models. Trained using a curated blend of adventure, roleplay, and instructive fiction datasets, Wayfarer emphasizes tense storytelling, authentic player failure scenarios, and robust narrative immersion, making it uniquely suited for interactive fiction and gaming experiences.", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": null }, "pricing": { "prompt": "0.0000007", "completion": "0.0000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, - "max_completion_tokens": 128000, + "context_length": 131072, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -541,18 +1216,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000005", "completion": "0.0000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -569,18 +1250,25 @@ "context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.00000005", "completion": "0.0000001", - "image": "0.00017685", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.00017685", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -597,18 +1285,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", 
+ "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.000002", "completion": "0.000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -625,18 +1319,24 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -653,18 +1353,24 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.000002", "completion": "0.000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -681,18 +1387,24 @@ "context_length": 163840, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 163840, @@ -706,25 +1418,31 @@ "name": "Qwen: QwQ 32B (free)", "created": 1741208814, "description": "QwQ is the reasoning model of the Qwen series. Compared with conventional instruction-tuned models, QwQ, which is capable of thinking and reasoning, can achieve significantly enhanced performance in downstream tasks, especially hard problems. 
QwQ-32B is the medium-sized reasoning model, which is capable of achieving competitive performance against state-of-the-art reasoning models, e.g., DeepSeek-R1, o1-mini.", - "context_length": 131072, + "context_length": 40000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "qwq" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": 131072, + "context_length": 40000, + "max_completion_tokens": 40000, "is_moderated": false }, "per_request_limits": null @@ -737,18 +1455,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "qwq" }, "pricing": { "prompt": "0.00000012", "completion": "0.00000018", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -761,26 +1485,32 @@ "id": "qwen/qwen2.5-32b-instruct", "name": "Qwen: Qwen2.5 32B Instruct", "created": 1741042744, - "description": "Qwen2.5 32B Instruct is the instruction-tuned variant of the latest Qwen large language model series. It provides enhanced instruction-following capabilities, improved proficiency in coding and mathematical reasoning, and robust handling of structured data and outputs such as JSON. It supports long-context processing up to 128K tokens and multilingual tasks across 29+ languages. The model has 32.5 billion parameters, 64 layers, and utilizes an advanced transformer architecture with RoPE, SwiGLU, RMSNorm, and Attention QKV bias.\n\nFor details, please refer to the [Qwen2.5 Blog](https://qwenlm.github.io/blog/qwen2.5/).", + "description": "Qwen2.5 32B Instruct is the instruction-tuned variant of the latest Qwen large language model series. It provides enhanced instruction-following capabilities, improved proficiency in coding and mathematical reasoning, and robust handling of structured data and outputs such as JSON. It supports long-context processing up to 128K tokens and multilingual tasks across 29+ languages. 
The model has 32.5 billion parameters, 64 layers, and utilizes an advanced transformer architecture with RoPE, SwiGLU, RMSNorm, and Attention QKV bias.\n\nFor more details, please refer to the [Qwen2.5 Blog](https://qwenlm.github.io/blog/qwen2.5/) .", "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.00000079", "completion": "0.00000079", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -793,18 +1523,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -821,18 +1557,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -849,18 +1591,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000075", "completion": "0.00015", - "image": "0.108375", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.108375", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -877,18 +1626,25 @@ "context_length": 1048576, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.000000075", "completion": "0.0000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1048576, @@ -905,18 +1661,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + 
"image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -933,18 +1696,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -961,18 +1731,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -989,18 +1766,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "DeepSeek", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.000002", "completion": "0.000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -1014,24 +1797,30 @@ "name": "Mistral: Saba", "created": 1739803239, "description": "Mistral Saba is a 24B-parameter language model specifically designed for the Middle East and South Asia, delivering accurate and contextually relevant responses while maintaining efficient performance. Trained on curated regional datasets, it supports multiple Indian-origin languages—including Tamil and Malayalam—alongside Arabic. This makes it a versatile option for a range of regional and multilingual applications. 
Read more at the blog post [here](https://mistral.ai/en/news/mistral-saba)", - "context_length": 32000, + "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -1045,18 +1834,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1073,18 +1868,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1101,18 +1902,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "none" }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -1129,18 +1936,24 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000011", "completion": "0.0000044", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -1149,34 +1962,6 @@ }, "per_request_limits": null }, - { - "id": "allenai/llama-3.1-tulu-3-405b", - "name": "Llama 3.1 Tulu 3 405B", - "created": 1739053421, - "description": "Tülu 3 405B is the largest model in the Tülu 3 family, applying fully open post-training recipes at a 405B parameter scale. Built on the Llama 3.1 405B base, it leverages Reinforcement Learning with Verifiable Rewards (RLVR) to enhance instruction following, MATH, GSM8K, and IFEval performance. 
As part of Tülu 3’s fully open-source approach, it offers state-of-the-art capabilities while surpassing prior open-weight models like Llama 3.1 405B Instruct and Nous Hermes 3 405B on multiple benchmarks. To read more, [click here.](https://allenai.org/blog/tulu-3-405B)", - "context_length": 16384, - "architecture": { - "modality": "text->text", - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000005", - "completion": "0.00001", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 16384, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "deepseek/deepseek-r1-distill-llama-8b", "name": "DeepSeek: R1 Distill Llama 8B", @@ -1185,18 +1970,24 @@ "context_length": 32000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.00000004", "completion": "0.00000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32000, @@ -1213,46 +2004,25 @@ "context_length": 1000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.0000001", "completion": "0.0000004", + "request": "0", "image": "0.0000258", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 1000000, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "google/gemini-2.0-flash-lite-preview-02-05:free", - "name": "Google: Gemini Flash Lite 2.0 Preview (free)", - "created": 1738768262, - "description": "Gemini Flash Lite 2.0 offers a significantly faster time to first token (TTFT) compared to [Gemini Flash 1.5](/google/gemini-flash-1.5), while maintaining quality on par with larger models like [Gemini Pro 1.5](/google/gemini-pro-1.5). Because it's currently in preview, it will be **heavily rate-limited** by Google. 
This model will move from free to paid pending a general rollout on February 24th, at $0.075 / $0.30 per million input / ouput tokens respectively.", - "context_length": 1000000, - "architecture": { - "modality": "text+image->text", - "tokenizer": "Gemini", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0", - "image": "0", - "request": "0", + "internal_reasoning": "0", "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" + "input_cache_write": "0" }, "top_provider": { "context_length": 1000000, @@ -1269,18 +2039,25 @@ "context_length": 2000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 2000000, @@ -1297,18 +2074,25 @@ "context_length": 7500, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.00000021", "completion": "0.00000063", - "image": "0.0002688", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0002688", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 7500, @@ -1322,24 +2106,30 @@ "name": "AionLabs: Aion-1.0", "created": 1738697557, "description": "Aion-1.0 is a multi-model system designed for high performance across various tasks, including reasoning and coding. It is built on DeepSeek-R1, augmented with additional models and techniques such as Tree of Thoughts (ToT) and Mixture of Experts (MoE). It is Aion Lab's most powerful reasoning model.", - "context_length": 32768, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.000004", "completion": "0.000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32768, + "context_length": 131072, "max_completion_tokens": 32768, "is_moderated": false }, @@ -1350,24 +2140,30 @@ "name": "AionLabs: Aion-1.0-Mini", "created": 1738697107, "description": "Aion-1.0-Mini 32B parameter model is a distilled version of the DeepSeek-R1 model, designed for strong performance in reasoning domains such as mathematics, coding, and logic. 
It is a modified variant of a FuseAI model that outperforms R1-Distill-Qwen-32B and R1-Distill-Llama-70B, with benchmark results available on its [Hugging Face page](https://huggingface.co/FuseAI/FuseO1-DeepSeekR1-QwQ-SkyT1-32B-Preview), independently replicated for verification.", - "context_length": 32768, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000007", "completion": "0.0000014", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32768, + "context_length": 131072, "max_completion_tokens": 32768, "is_moderated": false }, @@ -1381,18 +2177,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1409,18 +2211,25 @@ "context_length": 7500, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.0000008", "completion": "0.0000032", - "image": "0.001024", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.001024", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 7500, @@ -1437,18 +2246,24 @@ "context_length": 1000000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.00000005", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1000000, @@ -1465,18 +2280,25 @@ "context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -1490,25 +2312,32 @@ "name": "Qwen: Qwen2.5 VL 72B Instruct", "created": 1738410311, "description": "Qwen2.5-VL is proficient in recognizing common objects such as flowers, birds, fish, and insects. 
It is also highly capable of analyzing texts, charts, icons, graphics, and layouts within images.", - "context_length": 32000, + "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.0000007", "completion": "0.0000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, - "max_completion_tokens": null, + "context_length": 128000, + "max_completion_tokens": 128000, "is_moderated": false }, "per_request_limits": null @@ -1521,18 +2350,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.0000004", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -1549,18 +2384,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.0000016", "completion": "0.0000064", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1577,18 +2418,24 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000011", "completion": "0.0000044", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -1605,18 +2452,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.00000018", "completion": "0.00000018", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -1633,18 +2486,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", 
+ "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1661,18 +2520,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.00000007", "completion": "0.00000014", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1689,18 +2554,24 @@ "context_length": 16000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16000, @@ -1717,18 +2588,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.00000012", "completion": "0.00000018", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -1745,18 +2622,24 @@ "context_length": 64000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 64000, @@ -1773,18 +2656,24 @@ "context_length": 64000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.00000015", "completion": "0.00000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 64000, @@ -1801,18 +2690,24 @@ "context_length": 127000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", - "instruct_type": null + "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.000001", "completion": "0.000005", - "image": "0", "request": "0.005", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 127000, @@ -1829,18 +2724,24 @@ "context_length": 127072, "architecture": { 
"modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000001", - "image": "0", "request": "0.005", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 127072, @@ -1857,18 +2758,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000001", "completion": "0.00000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1885,18 +2792,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000002", "completion": "0.00000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -1910,25 +2823,31 @@ "name": "DeepSeek: R1 Distill Llama 70B (free)", "created": 1737663169, "description": "DeepSeek R1 Distill Llama 70B is a distilled large language model based on [Llama-3.3-70B-Instruct](/meta-llama/llama-3.3-70b-instruct), using outputs from [DeepSeek R1](/deepseek/deepseek-r1). 
The model combines advanced distillation techniques to achieve high performance across multiple benchmarks, including:\n\n- AIME 2024 pass@1: 70.0\n- MATH-500 pass@1: 94.5\n- CodeForces Rating: 1633\n\nThe model leverages fine-tuning from DeepSeek R1's outputs, enabling competitive performance comparable to larger frontier models.", - "context_length": 128000, + "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, - "max_completion_tokens": null, + "context_length": 8192, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null @@ -1941,18 +2860,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.00000023", "completion": "0.00000069", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -1969,18 +2894,25 @@ "context_length": 1048576, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1048576, @@ -1997,18 +2929,24 @@ "context_length": 163840, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "DeepSeek", "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 163840, @@ -2022,25 +2960,31 @@ "name": "DeepSeek: R1", "created": 1737381095, "description": "DeepSeek R1 is here: Performance on par with [OpenAI o1](/openai/o1), but open-sourced and with fully open reasoning tokens. 
It's 671B parameters in size, with 37B active in an inference pass.\n\nFully open-source model & [technical report](https://api-docs.deepseek.com/news/news250120).\n\nMIT licensed: Distill & commercialize freely!", - "context_length": 64000, + "context_length": 163840, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "DeepSeek", "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.0000007", - "completion": "0.0000025", - "image": "0", + "prompt": "0.00000055", + "completion": "0.00000219", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 64000, - "max_completion_tokens": 16000, + "context_length": 163840, + "max_completion_tokens": 163840, "is_moderated": false }, "per_request_limits": null @@ -2053,18 +2997,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "vicuna" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -2081,18 +3031,25 @@ "context_length": 1000192, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000011", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1000192, @@ -2106,24 +3063,30 @@ "name": "Mistral: Codestral 2501", "created": 1736895522, "description": "[Mistral](/mistralai)'s cutting-edge language model for coding. Codestral specializes in low-latency, high-frequency tasks such as fill-in-the-middle (FIM), code correction and test generation. 
\n\nLearn more on their blog post: https://mistral.ai/news/codestral-2501/", - "context_length": 256000, + "context_length": 262144, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000003", "completion": "0.0000009", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 256000, + "context_length": 262144, "max_completion_tokens": null, "is_moderated": false }, @@ -2137,18 +3100,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.00000007", "completion": "0.00000014", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -2165,18 +3134,24 @@ "context_length": 16000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16000, @@ -2193,18 +3168,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "DeepSeek", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -2218,25 +3199,31 @@ "name": "DeepSeek: DeepSeek V3", "created": 1735241320, "description": "DeepSeek-V3 is the latest model from the DeepSeek team, building upon the instruction following and coding abilities of the previous versions. 
Pre-trained on nearly 15 trillion tokens, the reported evaluations reveal that the model outperforms other open-source models and rivals leading closed-source models.\n\nFor model details, please visit [the DeepSeek-V3 repo](https://github.com/deepseek-ai/DeepSeek-V3) for more information, or see the [launch announcement](https://api-docs.deepseek.com/news/news1226).", - "context_length": 64000, + "context_length": 163840, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "DeepSeek", "instruct_type": null }, "pricing": { "prompt": "0.0000004", - "completion": "0.0000013", - "image": "0", + "completion": "0.00000089", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 64000, - "max_completion_tokens": 16000, + "context_length": 163840, + "max_completion_tokens": 163840, "is_moderated": false }, "per_request_limits": null @@ -2249,18 +3236,25 @@ "context_length": 40000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 40000, @@ -2277,18 +3271,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000007", "completion": "0.0000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -2305,18 +3305,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000015", "completion": "0.00006", - "image": "0.021675", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.021675", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -2333,18 +3340,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.000004", "completion": "0.000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -2361,18 +3374,25 @@ "context_length": 32768, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + 
"output_modalities": [ + "text" + ], "tokenizer": "Grok", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.00001", - "image": "0.0036", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0036", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -2389,18 +3409,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Grok", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.00001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -2417,18 +3443,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.0000000375", "completion": "0.00000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -2445,18 +3477,25 @@ "context_length": 1048576, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1048576, @@ -2465,58 +3504,36 @@ }, "per_request_limits": null }, - { - "id": "google/gemini-exp-1206:free", - "name": "Google: Gemini Experimental 1206 (free)", - "created": 1733507713, - "description": "Experimental release (December 6, 2024) of Gemini.", - "context_length": 2097152, - "architecture": { - "modality": "text+image->text", - "tokenizer": "Gemini", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 2097152, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "meta-llama/llama-3.3-70b-instruct:free", "name": "Meta: Llama 3.3 70B Instruct (free)", "created": 1733506137, "description": "The Meta Llama 3.3 multilingual large language model (LLM) is a pretrained and instruction tuned generative model in 70B (text in/text out). 
The Llama 3.3 instruction tuned text only model is optimized for multilingual dialogue use cases and outperforms many of the available open source and closed chat models on common industry benchmarks.\n\nSupported languages: English, German, French, Italian, Portuguese, Hindi, Spanish, and Thai.\n\n[Model Card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/MODEL_CARD.md)", - "context_length": 131072, + "context_length": 8000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, + "context_length": 8000, + "max_completion_tokens": 8000, "is_moderated": false }, "per_request_limits": null @@ -2529,18 +3546,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000012", "completion": "0.0000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -2557,18 +3580,25 @@ "context_length": 300000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Nova", "instruct_type": null }, "pricing": { "prompt": "0.00000006", "completion": "0.00000024", - "image": "0.00009", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.00009", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 300000, @@ -2585,18 +3615,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Nova", "instruct_type": null }, "pricing": { "prompt": "0.000000035", "completion": "0.00000014", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -2613,18 +3649,25 @@ "context_length": 300000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Nova", "instruct_type": null }, "pricing": { "prompt": "0.0000008", "completion": "0.0000032", - "image": "0.0012", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0012", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 300000, @@ -2641,18 +3684,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + 
"output_modalities": [ + "text" + ], "tokenizer": "Qwen", - "instruct_type": null + "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -2669,18 +3718,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", - "instruct_type": null + "instruct_type": "deepseek-r1" }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -2697,18 +3752,25 @@ "context_length": 40960, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 40960, @@ -2722,25 +3784,31 @@ "name": "EVA Qwen2.5 72B", "created": 1732210606, "description": "EVA Qwen2.5 72B is a roleplay and storywriting specialist model. It's a full-parameter finetune of Qwen2.5-72B on mixture of synthetic and natural data.\n\nIt uses Celeste 70B 0.1 data mixture, greatly expanding it to improve versatility, creativity and \"flavor\" of the resulting model.", - "context_length": 32000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000007", "completion": "0.0000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, - "max_completion_tokens": null, + "context_length": 131072, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -2753,18 +3821,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0.003613", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.003613", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -2778,24 +3853,30 @@ "name": "Mistral Large 2411", "created": 1731978685, "description": "Mistral Large 2 2411 is an update of [Mistral Large 2](/mistralai/mistral-large) released together with [Pixtral Large 2411](/mistralai/pixtral-large-2411)\n\nIt provides a 
significant upgrade on the previous [Mistral Large 24.07](/mistralai/mistral-large-2407), with notable improvements in long context understanding, a new system prompt, and more accurate function calling.", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, + "context_length": 131072, "max_completion_tokens": null, "is_moderated": false }, @@ -2806,24 +3887,30 @@ "name": "Mistral Large 2407", "created": 1731978415, "description": "This is Mistral AI's flagship model, Mistral Large 2 (version mistral-large-2407). It's a proprietary weights-available model and excels at reasoning, code, JSON, chat, and more. Read the launch announcement [here](https://mistral.ai/news/mistral-large-2407/).\n\nIt supports dozens of languages including French, German, Spanish, Italian, Portuguese, Arabic, Hindi, Russian, Chinese, Japanese, and Korean, along with 80+ coding languages including Python, Java, C, C++, JavaScript, and Bash. Its long context window allows precise information recall from large documents.\n", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, + "context_length": 131072, "max_completion_tokens": null, "is_moderated": false }, @@ -2834,24 +3921,31 @@ "name": "Mistral: Pixtral Large 2411", "created": 1731977388, "description": "Pixtral Large is a 124B parameter, open-weight, multimodal model built on top of [Mistral Large 2](/mistralai/mistral-large-2411). 
The model is able to understand documents, charts and natural images.\n\nThe model is available under the Mistral Research License (MRL) for research and educational use, and the Mistral Commercial License for experimentation, testing, and production for commercial purposes.\n\n", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.000006", - "image": "0.002888", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.002888", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, + "context_length": 131072, "max_completion_tokens": null, "is_moderated": false }, @@ -2865,18 +3959,25 @@ "context_length": 8192, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Grok", "instruct_type": null }, "pricing": { "prompt": "0.000005", "completion": "0.000015", - "image": "0.009", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.009", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -2893,18 +3994,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -2921,18 +4028,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -2949,18 +4062,24 @@ "context_length": 33000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000007", "completion": "0.00000016", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 33000, @@ -2977,18 +4096,24 @@ "context_length": 16000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "vicuna" }, "pricing": { "prompt": "0.0000045", "completion": "0.0000045", - "image": "0", "request": "0", - 
"input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16000, @@ -3005,18 +4130,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000026", "completion": "0.0000034", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -3033,18 +4164,24 @@ "context_length": 32000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.0000005", "completion": "0.0000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32000, @@ -3053,62 +4190,6 @@ }, "per_request_limits": null }, - { - "id": "anthropic/claude-3.5-haiku-20241022:beta", - "name": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)", - "created": 1730678400, - "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.000004", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "anthropic/claude-3.5-haiku-20241022", - "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)", - "created": 1730678400, - "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. 
It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.000004", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 8192, - "is_moderated": true - }, - "per_request_limits": null - }, { "id": "anthropic/claude-3.5-haiku:beta", "name": "Anthropic: Claude 3.5 Haiku (self-moderated)", @@ -3116,19 +4197,26 @@ "description": "Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.\n\nThis makes it highly suitable for environments that demand both speed and precision, such as software development, customer service bots, and data management systems.\n\nThis model is currently pointing to [Claude 3.5 Haiku (2024-10-22)](/anthropic/claude-3-5-haiku-20241022).", "context_length": 200000, "architecture": { - "modality": "text->text", + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.0000008", "completion": "0.000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -3144,19 +4232,96 @@ "description": "Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. 
Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.\n\nThis makes it highly suitable for environments that demand both speed and precision, such as software development, customer service bots, and data management systems.\n\nThis model is currently pointing to [Claude 3.5 Haiku (2024-10-22)](/anthropic/claude-3-5-haiku-20241022).", "context_length": 200000, "architecture": { - "modality": "text->text", + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.0000008", "completion": "0.000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 8192, + "is_moderated": true + }, + "per_request_limits": null + }, + { + "id": "anthropic/claude-3.5-haiku-20241022:beta", + "name": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)", + "created": 1730678400, + "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 8192, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "anthropic/claude-3.5-haiku-20241022", + "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)", + "created": 1730678400, + "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. 
It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -3173,18 +4338,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.000003375", "completion": "0.0000045", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -3201,18 +4372,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000001875", "completion": "0.00000225", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -3229,18 +4406,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -3257,18 +4441,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -3285,18 +4476,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Grok", "instruct_type": null }, "pricing": { "prompt": "0.000005", "completion": "0.000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + 
"input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -3313,18 +4510,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000001", "completion": "0.0000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -3338,24 +4541,30 @@ "name": "Mistral: Ministral 3B", "created": 1729123200, "description": "Ministral 3B is a 3B parameter model optimized for on-device and edge computing. It excels in knowledge, commonsense reasoning, and function-calling, outperforming larger models like Mistral 7B on most benchmarks. Supporting up to 128k context length, it’s ideal for orchestrating agentic workflows and specialist tasks with efficient inference.", - "context_length": 128000, + "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.00000004", "completion": "0.00000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 128000, + "context_length": 131072, "max_completion_tokens": null, "is_moderated": false }, @@ -3369,18 +4578,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000000025", "completion": "0.00000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -3397,18 +4612,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -3425,18 +4646,24 @@ "context_length": 131000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000012", "completion": "0.0000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131000, @@ -3445,34 +4672,6 @@ }, "per_request_limits": null }, - { - "id": "inflection/inflection-3-pi", - "name": 
"Inflection: Inflection 3 Pi", - "created": 1728604800, - "description": "Inflection 3 Pi powers Inflection's [Pi](https://pi.ai) chatbot, including backstory, emotional intelligence, productivity, and safety. It has access to recent news, and excels in scenarios like customer support and roleplay.\n\nPi has been trained to mirror your tone and style, if you use more emojis, so will Pi! Try experimenting with various prompts and conversation styles.", - "context_length": 8000, - "architecture": { - "modality": "text->text", - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000025", - "completion": "0.00001", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8000, - "max_completion_tokens": 1024, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "inflection/inflection-3-productivity", "name": "Inflection: Inflection 3 Productivity", @@ -3481,18 +4680,58 @@ "context_length": 8000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 8000, + "max_completion_tokens": 1024, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "inflection/inflection-3-pi", + "name": "Inflection: Inflection 3 Pi", + "created": 1728604800, + "description": "Inflection 3 Pi powers Inflection's [Pi](https://pi.ai) chatbot, including backstory, emotional intelligence, productivity, and safety. It has access to recent news, and excels in scenarios like customer support and roleplay.\n\nPi has been trained to mirror your tone and style, if you use more emojis, so will Pi! 
Try experimenting with various prompts and conversation styles.", + "context_length": 8000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000025", + "completion": "0.00001", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8000, @@ -3509,18 +4748,25 @@ "context_length": 1000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.0000000375", "completion": "0.00000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1000000, @@ -3529,6 +4775,40 @@ }, "per_request_limits": null }, + { + "id": "thedrummer/rocinante-12b", + "name": "Rocinante 12B", + "created": 1727654400, + "description": "Rocinante 12B is designed for engaging storytelling and rich prose.\n\nEarly testers have reported:\n- Expanded vocabulary with unique and expressive word choices\n- Enhanced creativity for vivid narratives\n- Adventure-filled and captivating stories", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.00000025", + "completion": "0.0000005", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "anthracite-org/magnum-v2-72b", "name": "Magnum v2 72B", @@ -3537,18 +4817,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000003", "completion": "0.000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -3565,46 +4851,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000015", "completion": "0.00000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "thedrummer/rocinante-12b", - "name": "Rocinante 12B", - "created": 1727654400, - "description": "Rocinante 12B is designed for engaging storytelling and rich prose.\n\nEarly testers have reported:\n- Expanded vocabulary with unique and expressive word 
choices\n- Enhanced creativity for vivid narratives\n- Adventure-filled and captivating stories", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "tokenizer": "Qwen", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.00000025", - "completion": "0.0000005", "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -3621,18 +4885,24 @@ "context_length": 20000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 20000, @@ -3649,18 +4919,24 @@ "context_length": 131000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.000000015", "completion": "0.000000025", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131000, @@ -3677,22 +4953,28 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null @@ -3705,18 +4987,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000001", "completion": "0.00000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -3733,18 +5021,25 @@ "context_length": 4096, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000016", - "image": "0.0051456", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0051456", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -3761,18 +5056,25 @@ "context_length": 
131072, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -3789,18 +5091,25 @@ "context_length": 16384, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.000000055", "completion": "0.000000055", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -3817,18 +5126,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -3845,18 +5160,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000013", "completion": "0.0000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -3870,24 +5191,31 @@ "name": "Qwen: Qwen2.5-VL 72B Instruct", "created": 1726617600, "description": "Qwen2.5 VL 72B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE 
AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", - "context_length": 4096, + "context_length": 32768, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": null }, "pricing": { "prompt": "0.0000006", "completion": "0.0000006", - "image": "0.000578", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.000578", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 4096, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -3901,18 +5229,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -3921,34 +5255,6 @@ }, "per_request_limits": null }, - { - "id": "openai/o1-mini-2024-09-12", - "name": "OpenAI: o1-mini (2024-09-12)", - "created": 1726099200, - "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. 
Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", - "context_length": 128000, - "architecture": { - "modality": "text->text", - "tokenizer": "GPT", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000011", - "completion": "0.0000044", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 65536, - "is_moderated": true - }, - "per_request_limits": null - }, { "id": "openai/o1-preview", "name": "OpenAI: o1-preview", @@ -3957,18 +5263,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000015", "completion": "0.00006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -3985,18 +5297,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000015", "completion": "0.00006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4013,18 +5331,58 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000011", "completion": "0.0000044", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": 65536, + "is_moderated": true + }, + "per_request_limits": null + }, + { + "id": "openai/o1-mini-2024-09-12", + "name": "OpenAI: o1-mini (2024-09-12)", + "created": 1726099200, + "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. 
Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", + "context_length": 128000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000011", + "completion": "0.0000044", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4038,57 +5396,36 @@ "name": "Mistral: Pixtral 12B", "created": 1725926400, "description": "The first multi-modal, text+image-to-text model from Mistral AI. Its weights were launched via torrent: https://x.com/mistralai/status/1833758285167722836.", - "context_length": 4096, + "context_length": 32768, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000001", "completion": "0.0000001", + "request": "0", "image": "0.0001445", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 4096, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, "per_request_limits": null }, - { - "id": "cohere/command-r-08-2024", - "name": "Cohere: Command R (08-2024)", - "created": 1724976000, - "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. 
More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", - "context_length": 128000, - "architecture": { - "modality": "text->text", - "tokenizer": "Cohere", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000001425", - "completion": "0.00000057", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 4000, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "cohere/command-r-plus-08-2024", "name": "Cohere: Command R+ (08-2024)", @@ -4097,18 +5434,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.000002375", "completion": "0.0000095", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4117,6 +5460,110 @@ }, "per_request_limits": null }, + { + "id": "cohere/command-r-08-2024", + "name": "Cohere: Command R (08-2024)", + "created": 1724976000, + "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. 
More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "context_length": 128000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Cohere", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000001425", + "completion": "0.00000057", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": 4000, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "qwen/qwen-2.5-vl-7b-instruct:free", + "name": "Qwen: Qwen2.5-VL 7B Instruct (free)", + "created": 1724803200, + "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", + "context_length": 64000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 64000, + "max_completion_tokens": 64000, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "qwen/qwen-2.5-vl-7b-instruct", + "name": "Qwen: Qwen2.5-VL 7B Instruct", + "created": 1724803200, + "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with 
the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", + "context_length": 32768, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000002", + "completion": "0.0000002", + "request": "0", + "image": "0.0001445", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "sao10k/l3.1-euryale-70b", "name": "Sao10K: Llama 3.1 Euryale 70B v2.2", @@ -4125,18 +5572,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000007", "completion": "0.0000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -4153,18 +5606,25 @@ "context_length": 1000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1000000, @@ -4173,62 +5633,6 @@ }, "per_request_limits": null }, - { - "id": "qwen/qwen-2.5-vl-7b-instruct", - "name": "Qwen: Qwen2.5-VL 7B Instruct", - "created": 1724803200, - "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the 
understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", - "context_length": 4096, - "architecture": { - "modality": "text+image->text", - "tokenizer": "Qwen", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000002", - "completion": "0.0000002", - "image": "0.0001445", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 4096, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "ai21/jamba-1-5-large", - "name": "AI21: Jamba 1.5 Large", - "created": 1724371200, - "description": "Jamba 1.5 Large is part of AI21's new family of open models, offering superior speed, efficiency, and quality.\n\nIt features a 256K effective context window, the longest among open models, enabling improved performance on tasks like document summarization and analysis.\n\nBuilt on a novel SSM-Transformer architecture, it outperforms larger models like Llama 3.1 70B on benchmarks while maintaining resource efficiency.\n\nRead their [announcement](https://www.ai21.com/blog/announcing-jamba-model-family) to learn more.", - "context_length": 256000, - "architecture": { - "modality": "text->text", - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000002", - "completion": "0.000008", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 256000, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "ai21/jamba-1-5-mini", "name": "AI21: Jamba 1.5 Mini", @@ -4237,18 +5641,58 @@ "context_length": 256000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 256000, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null + }, + { + "id": "ai21/jamba-1-5-large", + "name": "AI21: Jamba 1.5 Large", + "created": 1724371200, + "description": "Jamba 1.5 Large is part of AI21's new family of open models, offering superior speed, efficiency, and quality.\n\nIt features a 256K effective context window, the longest among open models, enabling improved performance on tasks like document summarization and analysis.\n\nBuilt on a novel SSM-Transformer architecture, it outperforms larger models like Llama 3.1 70B on benchmarks while maintaining resource efficiency.\n\nRead their [announcement](https://www.ai21.com/blog/announcing-jamba-model-family) to learn more.", + "context_length": 256000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + 
"tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000002", + "completion": "0.000008", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 256000, @@ -4265,18 +5709,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "phi3" }, "pricing": { "prompt": "0.0000001", "completion": "0.0000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4293,18 +5743,24 @@ "context_length": 131000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000012", "completion": "0.0000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131000, @@ -4321,18 +5777,24 @@ "context_length": 131000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131000, @@ -4349,18 +5811,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000005", "completion": "0.000015", - "image": "0.007225", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.007225", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4377,18 +5846,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000003", "completion": "0.00000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -4405,18 +5880,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": 
"0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -4433,18 +5914,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0.003613", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.003613", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4461,18 +5949,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "none" }, "pricing": { "prompt": "0.000002", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -4489,18 +5983,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -4510,56 +6010,34 @@ "per_request_limits": null }, { - "id": "perplexity/llama-3.1-sonar-small-128k-chat", - "name": "Perplexity: Llama 3.1 Sonar 8B", + "id": "perplexity/llama-3.1-sonar-small-128k-online", + "name": "Perplexity: Llama 3.1 Sonar 8B Online", "created": 1722470400, - "description": "Llama 3.1 Sonar is Perplexity's latest model family. It surpasses their earlier Sonar models in cost-efficiency, speed, and performance.\n\nThis is a normal offline LLM, but the [online version](/models/perplexity/llama-3.1-sonar-small-128k-online) of this model has Internet access.", - "context_length": 131072, + "description": "Llama 3.1 Sonar is Perplexity's latest model family. It surpasses their earlier Sonar models in cost-efficiency, speed, and performance.\n\nThis is the online version of the [offline chat model](/models/perplexity/llama-3.1-sonar-small-128k-chat). It is focused on delivering helpful, up-to-date, and factual responses. 
#online", + "context_length": 127072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", + "request": "0.005", "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "perplexity/llama-3.1-sonar-large-128k-chat", - "name": "Perplexity: Llama 3.1 Sonar 70B", - "created": 1722470400, - "description": "Llama 3.1 Sonar is Perplexity's latest model family. It surpasses their earlier Sonar models in cost-efficiency, speed, and performance.\n\nThis is a normal offline LLM, but the [online version](/models/perplexity/llama-3.1-sonar-large-128k-online) of this model has Internet access.", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "tokenizer": "Llama3", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000001", - "completion": "0.000001", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, + "context_length": 127072, "max_completion_tokens": null, "is_moderated": false }, @@ -4573,18 +6051,24 @@ "context_length": 127072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000001", - "image": "0", "request": "0.005", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 127072, @@ -4593,62 +6077,6 @@ }, "per_request_limits": null }, - { - "id": "perplexity/llama-3.1-sonar-small-128k-online", - "name": "Perplexity: Llama 3.1 Sonar 8B Online", - "created": 1722470400, - "description": "Llama 3.1 Sonar is Perplexity's latest model family. It surpasses their earlier Sonar models in cost-efficiency, speed, and performance.\n\nThis is the online version of the [offline chat model](/models/perplexity/llama-3.1-sonar-small-128k-chat). It is focused on delivering helpful, up-to-date, and factual responses. #online", - "context_length": 127072, - "architecture": { - "modality": "text->text", - "tokenizer": "Llama3", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000002", - "completion": "0.0000002", - "image": "0", - "request": "0.005", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 127072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "meta-llama/llama-3.1-405b-instruct", - "name": "Meta: Llama 3.1 405B Instruct", - "created": 1721692800, - "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. 
This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.0000008", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "meta-llama/llama-3.1-8b-instruct:free", "name": "Meta: Llama 3.1 8B Instruct (free)", @@ -4657,22 +6085,28 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null @@ -4685,18 +6119,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000002", "completion": "0.00000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -4705,6 +6145,40 @@ }, "per_request_limits": null }, + { + "id": "meta-llama/llama-3.1-405b-instruct", + "name": "Meta: Llama 3.1 405B Instruct", + "created": 1721692800, + "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.0000008", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": 8192, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "meta-llama/llama-3.1-70b-instruct", "name": "Meta: Llama 3.1 70B Instruct", @@ -4713,18 +6187,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000012", "completion": "0.0000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -4733,6 +6213,40 @@ }, "per_request_limits": null }, + { + "id": "mistralai/codestral-mamba", + "name": "Mistral: Codestral Mamba", + "created": 1721347200, + "description": "A 7.3B parameter Mamba-based model designed for code and reasoning tasks.\n\n- Linear time inference, allowing for theoretically infinite sequence lengths\n- 256k token context window\n- Optimized for quick responses, especially beneficial for code productivity\n- Performs comparably to state-of-the-art transformer models in code and reasoning tasks\n- Available under the Apache 2.0 license for free use, modification, and distribution", + "context_length": 262144, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000025", + "completion": "0.00000025", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "mistralai/mistral-nemo:free", "name": "Mistral: Mistral Nemo (free)", @@ -4741,18 +6255,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4769,18 +6289,24 @@ "context_length": 131072, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.000000035", "completion": "0.00000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": 
"0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131072, @@ -4789,34 +6315,6 @@ }, "per_request_limits": null }, - { - "id": "mistralai/codestral-mamba", - "name": "Mistral: Codestral Mamba", - "created": 1721347200, - "description": "A 7.3B parameter Mamba-based model designed for code and reasoning tasks.\n\n- Linear time inference, allowing for theoretically infinite sequence lengths\n- 256k token context window\n- Optimized for quick responses, especially beneficial for code productivity\n- Performs comparably to state-of-the-art transformer models in code and reasoning tasks\n- Available under the Apache 2.0 license for free use, modification, and distribution", - "context_length": 256000, - "architecture": { - "modality": "text->text", - "tokenizer": "Mistral", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000025", - "completion": "0.00000025", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 256000, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "openai/gpt-4o-mini", "name": "OpenAI: GPT-4o-mini", @@ -4825,18 +6323,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00000015", "completion": "0.0000006", - "image": "0.000217", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.000217", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4853,18 +6358,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00000015", "completion": "0.0000006", - "image": "0.007225", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.007225", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -4881,18 +6393,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -4909,18 +6427,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000000054", "completion": "0.000000054", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + 
"input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -4937,18 +6461,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0.00000027", "completion": "0.00000027", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -4965,18 +6495,24 @@ "context_length": 16384, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000001875", "completion": "0.00000225", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -4993,18 +6529,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5021,18 +6563,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0.00000003", "completion": "0.00000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5049,18 +6597,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Yi", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -5077,18 +6631,24 @@ "context_length": 256000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": null }, "pricing": { "prompt": "0.0000005", "completion": "0.0000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 256000, @@ -5105,18 +6665,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + 
"text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -5133,18 +6700,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -5161,18 +6735,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000007", "completion": "0.0000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5189,18 +6769,24 @@ "context_length": 16000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000009", "completion": "0.0000009", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16000, @@ -5217,18 +6803,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Qwen", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000009", "completion": "0.0000009", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -5245,18 +6837,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5273,18 +6871,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.00000003", 
"completion": "0.000000055", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -5301,18 +6905,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.00000003", "completion": "0.000000055", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -5329,18 +6939,24 @@ "context_length": 131000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "chatml" }, "pricing": { "prompt": "0.000000025", "completion": "0.00000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 131000, @@ -5357,18 +6973,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "phi3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5385,18 +7007,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "phi3" }, "pricing": { "prompt": "0.0000001", "completion": "0.0000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5413,18 +7041,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "phi3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5441,18 +7075,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Other", "instruct_type": "phi3" }, "pricing": { "prompt": "0.000001", "completion": "0.000001", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", 
+ "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5469,18 +7109,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.000003375", "completion": "0.0000045", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5497,18 +7143,25 @@ "context_length": 1000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.000000075", "completion": "0.0000003", - "image": "0.00004", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.00004", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 1000000, @@ -5517,62 +7170,6 @@ }, "per_request_limits": null }, - { - "id": "openai/gpt-4o-2024-05-13", - "name": "OpenAI: GPT-4o (2024-05-13)", - "created": 1715558400, - "description": "GPT-4o (\"o\" for \"omni\") is OpenAI's latest AI model, supporting both text and image inputs with text outputs. It maintains the intelligence level of [GPT-4 Turbo](/models/openai/gpt-4-turbo) while being twice as fast and 50% more cost-effective. GPT-4o also offers improved performance in processing non-English languages and enhanced visual capabilities.\n\nFor benchmarking against other models, it was briefly called [\"im-also-a-good-gpt2-chatbot\"](https://twitter.com/LiamFedus/status/1790064963966370209)\n\n#multimodal", - "context_length": 128000, - "architecture": { - "modality": "text+image->text", - "tokenizer": "GPT", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000005", - "completion": "0.000015", - "image": "0.007225", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 4096, - "is_moderated": true - }, - "per_request_limits": null - }, - { - "id": "meta-llama/llama-guard-2-8b", - "name": "Meta: LlamaGuard 2 8B", - "created": 1715558400, - "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 8192, - "architecture": { - "modality": "text->text", - "tokenizer": "Llama3", - "instruct_type": "none" - }, - "pricing": { - "prompt": "0.0000002", - "completion": "0.0000002", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, { "id": "openai/gpt-4o", "name": "OpenAI: GPT-4o", @@ -5581,18 +7178,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "image": "0.003613", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.003613", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5609,18 +7213,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000006", "completion": "0.000018", - "image": "0.007225", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.007225", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5629,6 +7240,75 @@ }, "per_request_limits": null }, + { + "id": "openai/gpt-4o-2024-05-13", + "name": "OpenAI: GPT-4o (2024-05-13)", + "created": 1715558400, + "description": "GPT-4o (\"o\" for \"omni\") is OpenAI's latest AI model, supporting both text and image inputs with text outputs. It maintains the intelligence level of [GPT-4 Turbo](/models/openai/gpt-4-turbo) while being twice as fast and 50% more cost-effective. GPT-4o also offers improved performance in processing non-English languages and enhanced visual capabilities.\n\nFor benchmarking against other models, it was briefly called [\"im-also-a-good-gpt2-chatbot\"](https://twitter.com/LiamFedus/status/1790064963966370209)\n\n#multimodal", + "context_length": 128000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000005", + "completion": "0.000015", + "request": "0", + "image": "0.007225", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null + }, + { + "id": "meta-llama/llama-guard-2-8b", + "name": "Meta: LlamaGuard 2 8B", + "created": 1715558400, + "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. 
Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 8192, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "none" + }, + "pricing": { + "prompt": "0.0000002", + "completion": "0.0000002", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "neversleep/llama-3-lumimaid-8b:extended", "name": "NeverSleep: Llama 3 Lumimaid 8B (extended)", @@ -5637,18 +7317,24 @@ "context_length": 24576, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 24576, @@ -5665,18 +7351,24 @@ "context_length": 24576, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 24576, @@ -5693,18 +7385,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -5721,18 +7419,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + 
"internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5749,18 +7453,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000003", "completion": "0.00000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5777,18 +7487,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama3", "instruct_type": "llama3" }, "pricing": { "prompt": "0.00000023", "completion": "0.0000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -5805,18 +7521,24 @@ "context_length": 65536, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.0000009", "completion": "0.0000009", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 65536, @@ -5833,18 +7555,24 @@ "context_length": 65536, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "vicuna" }, "pricing": { "prompt": "0.0000005", "completion": "0.0000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 65536, @@ -5861,18 +7589,24 @@ "context_length": 32000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "vicuna" }, "pricing": { "prompt": "0.00000007", "completion": "0.00000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32000, @@ -5889,18 +7623,25 @@ "context_length": 2000000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.00000125", "completion": "0.000005", - "image": "0.0006575", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0006575", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 2000000, @@ 
-5917,18 +7658,25 @@ "context_length": 128000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00001", "completion": "0.00003", - "image": "0.01445", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.01445", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5945,18 +7693,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.00000285", "completion": "0.00001425", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -5973,18 +7727,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.00000285", "completion": "0.00001425", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -6001,18 +7761,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "airoboros" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000008", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -6029,18 +7795,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.00000095", "completion": "0.0000019", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -6057,18 +7829,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.000000475", "completion": "0.000001425", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -6085,18 +7863,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + 
"output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.00000025", "completion": "0.00000125", - "image": "0.0004", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0004", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6113,18 +7898,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.00000025", "completion": "0.00000125", - "image": "0.0004", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0004", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6141,18 +7933,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000015", "completion": "0.000075", - "image": "0.024", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.024", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6169,18 +7968,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000015", "completion": "0.000075", - "image": "0.024", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.024", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6197,18 +8003,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6225,18 +8038,25 @@ "context_length": 200000, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000015", - "image": "0.0048", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0048", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6253,18 +8073,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], 
"tokenizer": "Cohere", "instruct_type": null }, "pricing": { "prompt": "0.000000475", "completion": "0.000001425", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -6281,18 +8107,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.000002", "completion": "0.000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -6309,18 +8141,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": "gemma" }, "pricing": { "prompt": "0.00000015", "completion": "0.00000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -6337,18 +8175,24 @@ "context_length": 4095, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4095, @@ -6365,18 +8209,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00001", "completion": "0.00003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -6393,18 +8243,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000006", "completion": "0.0000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -6413,29 +8269,69 @@ }, "per_request_limits": null }, + { + "id": "mistralai/mistral-medium", + "name": "Mistral Medium", + "created": 1704844800, + "description": "This is Mistral AI's closed-source, medium-sided model. It's powered by a closed-source prototype and excels at reasoning, code, JSON, chat, and more. 
In benchmarks, it compares with many of the flagship models of other companies.", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000275", + "completion": "0.0000081", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null + }, { "id": "mistralai/mistral-small", "name": "Mistral Small", "created": 1704844800, "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.", - "context_length": 32000, + "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.0000002", "completion": "0.0000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -6445,53 +8341,31 @@ "id": "mistralai/mistral-tiny", "name": "Mistral Tiny", "created": 1704844800, - "description": "This model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", - "context_length": 32000, + "description": "Note: This model is being deprecated. Recommended replacement is the newer [Ministral 8B](/mistral/ministral-8b)\n\nThis model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. 
It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", + "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": null }, "pricing": { "prompt": "0.00000025", "completion": "0.00000025", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { - "context_length": 32000, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "mistralai/mistral-medium", - "name": "Mistral Medium", - "created": 1704844800, - "description": "This is Mistral AI's closed-source, medium-sided model. It's powered by a closed-source prototype and excels at reasoning, code, JSON, chat, and more. In benchmarks, it compares with many of the flagship models of other companies.", - "context_length": 32000, - "architecture": { - "modality": "text->text", - "tokenizer": "Mistral", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000275", - "completion": "0.0000081", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32000, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -6505,18 +8379,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -6533,18 +8413,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.0000005", "completion": "0.0000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -6561,18 +8447,25 @@ "context_length": 16384, "architecture": { "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": "0.0000005", "completion": "0.0000015", - "image": "0.0025", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0025", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16384, @@ -6589,18 +8482,24 @@ "context_length": 32760, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Gemini", "instruct_type": null }, "pricing": { "prompt": 
"0.0000005", "completion": "0.0000015", - "image": "0.0025", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0.0025", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32760, @@ -6617,18 +8516,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "none" }, "pricing": { "prompt": "0.0000006", "completion": "0.0000006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -6645,18 +8550,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.00000024", "completion": "0.00000024", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -6673,18 +8584,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "openchat" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -6701,18 +8618,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "openchat" }, "pricing": { "prompt": "0.000000055", "completion": "0.000000055", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -6729,18 +8652,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.0000015", "completion": "0.00000225", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -6749,62 +8678,6 @@ }, "per_request_limits": null }, - { - "id": "anthropic/claude-2:beta", - "name": "Anthropic: Claude v2 (self-moderated)", - "created": 1700611200, - "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool 
use.", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000008", - "completion": "0.000024", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null - }, - { - "id": "anthropic/claude-2", - "name": "Anthropic: Claude v2", - "created": 1700611200, - "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000008", - "completion": "0.000024", - "image": "0", - "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": true - }, - "per_request_limits": null - }, { "id": "anthropic/claude-2.1:beta", "name": "Anthropic: Claude v2.1 (self-moderated)", @@ -6813,18 +8686,24 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000008", "completion": "0.000024", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6841,18 +8720,92 @@ "context_length": 200000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000008", "completion": "0.000024", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null + }, + { + "id": "anthropic/claude-2:beta", + "name": "Anthropic: Claude v2 (self-moderated)", + "created": 1700611200, + "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", + "context_length": 200000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000008", + "completion": "0.000024", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null + }, + { 
+ "id": "anthropic/claude-2", + "name": "Anthropic: Claude v2", + "created": 1700611200, + "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", + "context_length": 200000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000008", + "completion": "0.000024", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 200000, @@ -6869,18 +8822,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "chatml" }, "pricing": { "prompt": "0.00000017", "completion": "0.00000017", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -6897,18 +8856,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "alpaca" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -6925,18 +8890,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.00000007", "completion": "0.00000007", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -6953,18 +8924,24 @@ "context_length": 6144, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "airoboros" }, "pricing": { "prompt": "0.000009375", "completion": "0.000009375", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 6144, @@ -6981,6 +8958,12 @@ "context_length": 2000000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Router", "instruct_type": null }, @@ -6990,9 +8973,9 @@ "request": "-1", "image": "-1", "web_search": "-1", + "internal_reasoning": "-1", "input_cache_read": "-1", - "input_cache_write": "-1", - "internal_reasoning": "-1" + "input_cache_write": "-1" }, "top_provider": { "context_length": null, @@ -7009,18 +8992,24 @@ 
"context_length": 16385, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16385, @@ -7037,18 +9026,24 @@ "context_length": 128000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00001", "completion": "0.00003", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 128000, @@ -7065,18 +9060,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "PaLM", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -7093,18 +9094,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "PaLM", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -7121,18 +9128,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "airoboros" }, "pricing": { "prompt": "0.0000005", "completion": "0.0000005", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7149,18 +9162,24 @@ "context_length": 8192, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "airoboros" }, "pricing": { "prompt": "0.00000375", "completion": "0.00000375", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8192, @@ -7177,18 +9196,24 @@ "context_length": 4095, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": "chatml" }, 
"pricing": { "prompt": "0.0000015", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4095, @@ -7205,18 +9230,24 @@ "context_length": 32768, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "mistral" }, "pricing": { "prompt": "0.0000002", "completion": "0.0000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32768, @@ -7233,18 +9264,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7261,18 +9298,24 @@ "context_length": 16385, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.000003", "completion": "0.000004", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16385, @@ -7289,18 +9332,24 @@ "context_length": 32767, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00006", "completion": "0.00012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32767, @@ -7317,18 +9366,24 @@ "context_length": 32767, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00006", "completion": "0.00012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 32767, @@ -7345,18 +9400,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.00000017", "completion": "0.00000017", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - 
"internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7373,18 +9434,24 @@ "context_length": 8000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.0000015", "completion": "0.00000225", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8000, @@ -7401,18 +9468,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Mistral", "instruct_type": "zephyr" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7429,18 +9502,24 @@ "context_length": 100000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000008", "completion": "0.000024", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 100000, @@ -7457,18 +9536,24 @@ "context_length": 100000, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Claude", "instruct_type": null }, "pricing": { "prompt": "0.000008", "completion": "0.000024", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 100000, @@ -7485,18 +9570,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.0000008", "completion": "0.0000012", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7513,18 +9604,24 @@ "context_length": 9216, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "PaLM", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 9216, @@ -7541,18 +9638,24 @@ "context_length": 7168, 
"architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "PaLM", "instruct_type": null }, "pricing": { "prompt": "0.000001", "completion": "0.000002", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 7168, @@ -7569,18 +9672,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0", "completion": "0", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7597,18 +9706,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "alpaca" }, "pricing": { "prompt": "0.000000065", "completion": "0.000000065", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7625,18 +9740,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "llama2" }, "pricing": { "prompt": "0.00000022", "completion": "0.00000022", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7653,18 +9774,24 @@ "context_length": 4096, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "Llama2", "instruct_type": "llama2" }, "pricing": { "prompt": "0.0000009", "completion": "0.0000009", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 4096, @@ -7681,18 +9808,24 @@ "context_length": 16385, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000005", "completion": "0.0000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16385, @@ -7709,18 +9842,24 @@ "context_length": 16385, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.0000005", 
"completion": "0.0000015", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 16385, @@ -7737,18 +9876,24 @@ "context_length": 8191, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00003", "completion": "0.00006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8191, @@ -7765,18 +9910,24 @@ "context_length": 8191, "architecture": { "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], "tokenizer": "GPT", "instruct_type": null }, "pricing": { "prompt": "0.00003", "completion": "0.00006", - "image": "0", "request": "0", - "input_cache_read": "0", - "input_cache_write": "0", + "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0", + "input_cache_write": "0" }, "top_provider": { "context_length": 8191, diff --git a/packages/kbot/dist-in/index.d.ts b/packages/kbot/dist-in/index.d.ts index b45525e2..a6b99979 100644 --- a/packages/kbot/dist-in/index.d.ts +++ b/packages/kbot/dist-in/index.d.ts @@ -5,4 +5,7 @@ export declare const assistant_supported: Record; export * from './types.js'; export * from './zod_types.js'; export * from './zod_schema.js'; +export { E_OPENAI_MODEL } from './models/cache/openai-models.js'; +export { E_OPENROUTER_MODEL } from './models/cache/openrouter-models.js'; +export { E_OPENROUTER_MODEL_FREE } from './models/cache/openrouter-models-free.js'; export { IKBotTask } from '@polymech/ai-tools'; diff --git a/packages/kbot/dist-in/index.js b/packages/kbot/dist-in/index.js index 36a21af0..9e96dbfb 100644 --- a/packages/kbot/dist-in/index.js +++ b/packages/kbot/dist-in/index.js @@ -33,4 +33,7 @@ export const assistant_supported = { export * from './types.js'; export * from './zod_types.js'; export * from './zod_schema.js'; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUN2QyxPQUFPLElBQUksTUFBTSxXQUFXLENBQUE7QUFFNUIsTUFBTSxTQUFTLEdBQUcsUUFBUSxLQUFLLE9BQU8sQ0FBQTtBQUV0QyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sZUFBZSxDQUFBO0FBQzVDLE9BQU8sRUFBRSxPQUFPLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sZ0JBQWdCLENBQUE7QUFFNUMsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFRLFlBQVksQ0FBQyxXQUFXLENBQUMsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsR0FBRyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDdkMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxDQUFDLENBQUMsQ0FBQyxVQUFVLENBQUMsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxFQUFFLElBQUksV0FBVyxFQUFFLENBQUMsQ0FBQyxDQUFBO0FBRXJILE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUEyQjtJQUN6RCxJQUFJLEVBQUUsVUFBVTtJQUNoQixNQUFNLEVBQUUsWUFBWTtJQUNwQixLQUFLLEVBQUUsZUFBZTtJQUN0QixNQUFNLEVBQUUsVUFBVTtJQUNsQixNQUFNLEVBQUUsb0JBQW9CO0lBQzVCLE9BQU8sRUFBRSx5RUFBeUU7SUFDbEYsS0FBSyxFQUFFLGVBQWU7SUFDdEIsT0FBTyxFQUFFLFdBQVc7SUFDcEIsT0FBTyxFQUFFLGFBQWE7SUFDdEIsS0FBSyxFQUFFLGlCQUFpQjtJQUN4QixPQUFPLEVBQUUsa0JBQWtCO0lBQzNCLEtBQUssRUFBRSxlQUFlO0lBQ3RCLE1BQU0sRUFBRSxpQkFBaUI7SUFDekIsTUFBTSxFQUFFLFlBQVk7SUFDcEIsT0FBTyxFQUFFLDJFQUEyRTtJQUNwRixLQUFLLEVBQUUsZUFBZTtJQUN0QixLQUFLLEVBQUUsYUFBYTtJQUNwQixLQUFLLEVBQUUsa0JBQWtCO0lBQ3pCLE1BQU0sRUFBRSxZQUFZO0lBQ3BCLEtBQUssRUFBRSx3QkFBd0I7SUFDL0IsTUFBTSxFQUFFLFlBQVk7Q0FDckIsQ0FBQTtBQUNELGNBQWMsWUFBWSxDQUFBO0FBQzFCLGNBQWMsZ0JBQWdCLENBQUE7QUFDOUIsY0FBYyxpQkFBaUIsQ0FBQSJ9 \ No newline at end of file +export { E_OPENAI_MODEL } from './models/cache/openai-models.js'; +export { E_OPENROUTER_MODEL } from './models/cache/openrouter-models.js'; +export { E_OPENROUTER_MODEL_FREE } from './models/cache/openrouter-models-free.js'; +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUN2QyxPQUFPLElBQUksTUFBTSxXQUFXLENBQUE7QUFFNUIsTUFBTSxTQUFTLEdBQUcsUUFBUSxLQUFLLE9BQU8sQ0FBQTtBQUV0QyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sZUFBZSxDQUFBO0FBQzVDLE9BQU8sRUFBRSxPQUFPLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sZ0JBQWdCLENBQUE7QUFFNUMsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFRLFlBQVksQ0FBQyxXQUFXLENBQUMsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsR0FBRyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDdkMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxDQUFDLENBQUMsQ0FBQyxVQUFVLENBQUMsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxFQUFFLElBQUksV0FBVyxFQUFFLENBQUMsQ0FBQyxDQUFBO0FBRXJILE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUEyQjtJQUN6RCxJQUFJLEVBQUUsVUFBVTtJQUNoQixNQUFNLEVBQUUsWUFBWTtJQUNwQixLQUFLLEVBQUUsZUFBZTtJQUN0QixNQUFNLEVBQUUsVUFBVTtJQUNsQixNQUFNLEVBQUUsb0JBQW9CO0lBQzVCLE9BQU8sRUFBRSx5RUFBeUU7SUFDbEYsS0FBSyxFQUFFLGVBQWU7SUFDdEIsT0FBTyxFQUFFLFdBQVc7SUFDcEIsT0FBTyxFQUFFLGFBQWE7SUFDdEIsS0FBSyxFQUFFLGlCQUFpQjtJQUN4QixPQUFPLEVBQUUsa0JBQWtCO0lBQzNCLEtBQUssRUFBRSxlQUFlO0lBQ3RCLE1BQU0sRUFBRSxpQkFBaUI7SUFDekIsTUFBTSxFQUFFLFlBQVk7SUFDcEIsT0FBTyxFQUFFLDJFQUEyRTtJQUNwRixLQUFLLEVBQUUsZUFBZTtJQUN0QixLQUFLLEVBQUUsYUFBYTtJQUNwQixLQUFLLEVBQUUsa0JBQWtCO0lBQ3pCLE1BQU0sRUFBRSxZQUFZO0lBQ3BCLEtBQUssRUFBRSx3QkFBd0I7SUFDL0IsTUFBTSxFQUFFLFlBQVk7Q0FDckIsQ0FBQTtBQUNELGNBQWMsWUFBWSxDQUFBO0FBQzFCLGNBQWMsZ0JBQWdCLENBQUE7QUFDOUIsY0FBYyxpQkFBaUIsQ0FBQTtBQUUvQixPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0saUNBQWlDLENBQUE7QUFDaEUsT0FBTyxFQUFFLGtCQUFrQixFQUFFLE1BQU0scUNBQXFDLENBQUE7QUFDeEUsT0FBTyxFQUFFLHVCQUF1QixFQUFFLE1BQU0sMENBQTBDLENBQUEifQ== \ No newline at end of file diff --git a/packages/kbot/dist-in/models/cache/openai-models.d.ts b/packages/kbot/dist-in/models/cache/openai-models.d.ts new file mode 100644 index 00000000..9692839e --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openai-models.d.ts @@ -0,0 +1,65 @@ +export declare enum E_OPENAI_MODEL { + MODEL_GPT_4O_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-realtime-preview-2024-12-17", + MODEL_GPT_4O_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-audio-preview-2024-12-17", + MODEL_DALL_E_3 = "dall-e-3", + MODEL_DALL_E_2 = "dall-e-2", + MODEL_GPT_4O_AUDIO_PREVIEW_2024_10_01 = "gpt-4o-audio-preview-2024-10-01", + MODEL_O3_MINI = "o3-mini", + MODEL_O3_MINI_2025_01_31 = "o3-mini-2025-01-31", + MODEL_GPT_4O_MINI_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-mini-realtime-preview-2024-12-17", + MODEL_GPT_4O_MINI_REALTIME_PREVIEW = "gpt-4o-mini-realtime-preview", + MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01 = "gpt-4o-realtime-preview-2024-10-01", + MODEL_GPT_4O_TRANSCRIBE = "gpt-4o-transcribe", + MODEL_GPT_4O_MINI_TRANSCRIBE = "gpt-4o-mini-transcribe", + MODEL_GPT_4O_REALTIME_PREVIEW = "gpt-4o-realtime-preview", + MODEL_BABBAGE_002 = "babbage-002", + MODEL_GPT_4O_MINI_TTS = "gpt-4o-mini-tts", + MODEL_TTS_1_HD_1106 = "tts-1-hd-1106", + MODEL_TEXT_EMBEDDING_3_LARGE = "text-embedding-3-large", + MODEL_GPT_4 = "gpt-4", + MODEL_TEXT_EMBEDDING_ADA_002 = "text-embedding-ada-002", + MODEL_TTS_1_HD = "tts-1-hd", + MODEL_GPT_4O_MINI_AUDIO_PREVIEW = "gpt-4o-mini-audio-preview", + MODEL_GPT_4O_AUDIO_PREVIEW = "gpt-4o-audio-preview", + MODEL_O1_PREVIEW_2024_09_12 = "o1-preview-2024-09-12", + MODEL_GPT_3_5_TURBO_INSTRUCT_0914 = "gpt-3.5-turbo-instruct-0914", + MODEL_GPT_4O_MINI_SEARCH_PREVIEW = "gpt-4o-mini-search-preview", + 
MODEL_TTS_1_1106 = "tts-1-1106", + MODEL_DAVINCI_002 = "davinci-002", + MODEL_GPT_3_5_TURBO_1106 = "gpt-3.5-turbo-1106", + MODEL_GPT_4_TURBO = "gpt-4-turbo", + MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct", + MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo", + MODEL_CHATGPT_4O_LATEST = "chatgpt-4o-latest", + MODEL_GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-mini-search-preview-2025-03-11", + MODEL_GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", + MODEL_WHISPER_1 = "whisper-1", + MODEL_GPT_3_5_TURBO_0125 = "gpt-3.5-turbo-0125", + MODEL_GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", + MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k", + MODEL_GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-2024-04-09", + MODEL_GPT_4_1106_PREVIEW = "gpt-4-1106-preview", + MODEL_O1_PREVIEW = "o1-preview", + MODEL_GPT_4_0613 = "gpt-4-0613", + MODEL_GPT_4O_SEARCH_PREVIEW = "gpt-4o-search-preview", + MODEL_GPT_4_5_PREVIEW = "gpt-4.5-preview", + MODEL_GPT_4_5_PREVIEW_2025_02_27 = "gpt-4.5-preview-2025-02-27", + MODEL_GPT_4O_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-search-preview-2025-03-11", + MODEL_OMNI_MODERATION_LATEST = "omni-moderation-latest", + MODEL_TTS_1 = "tts-1", + MODEL_OMNI_MODERATION_2024_09_26 = "omni-moderation-2024-09-26", + MODEL_TEXT_EMBEDDING_3_SMALL = "text-embedding-3-small", + MODEL_GPT_4O = "gpt-4o", + MODEL_GPT_4O_MINI = "gpt-4o-mini", + MODEL_GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", + MODEL_GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", + MODEL_GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview", + MODEL_O1_MINI = "o1-mini", + MODEL_GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-mini-audio-preview-2024-12-17", + MODEL_O1_MINI_2024_09_12 = "o1-mini-2024-09-12", + MODEL_GPT_4_0125_PREVIEW = "gpt-4-0125-preview", + MODEL_O1 = "o1", + MODEL_O1_2024_12_17 = "o1-2024-12-17", + MODEL_O1_PRO = "o1-pro", + MODEL_O1_PRO_2025_03_19 = "o1-pro-2025-03-19" +} diff --git a/packages/kbot/dist-in/models/cache/openai-models.js b/packages/kbot/dist-in/models/cache/openai-models.js new file mode 100644 index 00000000..e9b66c23 --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openai-models.js @@ -0,0 +1,67 @@ +export var E_OPENAI_MODEL; +(function (E_OPENAI_MODEL) { + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2024_12_17"] = "gpt-4o-realtime-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2024_12_17"] = "gpt-4o-audio-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_DALL_E_3"] = "dall-e-3"; + E_OPENAI_MODEL["MODEL_DALL_E_2"] = "dall-e-2"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2024_10_01"] = "gpt-4o-audio-preview-2024-10-01"; + E_OPENAI_MODEL["MODEL_O3_MINI"] = "o3-mini"; + E_OPENAI_MODEL["MODEL_O3_MINI_2025_01_31"] = "o3-mini-2025-01-31"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_REALTIME_PREVIEW_2024_12_17"] = "gpt-4o-mini-realtime-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_REALTIME_PREVIEW"] = "gpt-4o-mini-realtime-preview"; + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01"] = "gpt-4o-realtime-preview-2024-10-01"; + E_OPENAI_MODEL["MODEL_GPT_4O_TRANSCRIBE"] = "gpt-4o-transcribe"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_TRANSCRIBE"] = "gpt-4o-mini-transcribe"; + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW"] = "gpt-4o-realtime-preview"; + E_OPENAI_MODEL["MODEL_BABBAGE_002"] = "babbage-002"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_TTS"] = "gpt-4o-mini-tts"; + E_OPENAI_MODEL["MODEL_TTS_1_HD_1106"] = "tts-1-hd-1106"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_3_LARGE"] = "text-embedding-3-large"; + E_OPENAI_MODEL["MODEL_GPT_4"] = "gpt-4"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_ADA_002"] = 
"text-embedding-ada-002"; + E_OPENAI_MODEL["MODEL_TTS_1_HD"] = "tts-1-hd"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_AUDIO_PREVIEW"] = "gpt-4o-mini-audio-preview"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW"] = "gpt-4o-audio-preview"; + E_OPENAI_MODEL["MODEL_O1_PREVIEW_2024_09_12"] = "o1-preview-2024-09-12"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT_0914"] = "gpt-3.5-turbo-instruct-0914"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_SEARCH_PREVIEW"] = "gpt-4o-mini-search-preview"; + E_OPENAI_MODEL["MODEL_TTS_1_1106"] = "tts-1-1106"; + E_OPENAI_MODEL["MODEL_DAVINCI_002"] = "davinci-002"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_1106"] = "gpt-3.5-turbo-1106"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO"] = "gpt-4-turbo"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT"] = "gpt-3.5-turbo-instruct"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO"] = "gpt-3.5-turbo"; + E_OPENAI_MODEL["MODEL_CHATGPT_4O_LATEST"] = "chatgpt-4o-latest"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11"] = "gpt-4o-mini-search-preview-2025-03-11"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_11_20"] = "gpt-4o-2024-11-20"; + E_OPENAI_MODEL["MODEL_WHISPER_1"] = "whisper-1"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_0125"] = "gpt-3.5-turbo-0125"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_05_13"] = "gpt-4o-2024-05-13"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_16K"] = "gpt-3.5-turbo-16k"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO_2024_04_09"] = "gpt-4-turbo-2024-04-09"; + E_OPENAI_MODEL["MODEL_GPT_4_1106_PREVIEW"] = "gpt-4-1106-preview"; + E_OPENAI_MODEL["MODEL_O1_PREVIEW"] = "o1-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_0613"] = "gpt-4-0613"; + E_OPENAI_MODEL["MODEL_GPT_4O_SEARCH_PREVIEW"] = "gpt-4o-search-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_5_PREVIEW"] = "gpt-4.5-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_5_PREVIEW_2025_02_27"] = "gpt-4.5-preview-2025-02-27"; + E_OPENAI_MODEL["MODEL_GPT_4O_SEARCH_PREVIEW_2025_03_11"] = "gpt-4o-search-preview-2025-03-11"; + E_OPENAI_MODEL["MODEL_OMNI_MODERATION_LATEST"] = "omni-moderation-latest"; + E_OPENAI_MODEL["MODEL_TTS_1"] = "tts-1"; + E_OPENAI_MODEL["MODEL_OMNI_MODERATION_2024_09_26"] = "omni-moderation-2024-09-26"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_3_SMALL"] = "text-embedding-3-small"; + E_OPENAI_MODEL["MODEL_GPT_4O"] = "gpt-4o"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI"] = "gpt-4o-mini"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_08_06"] = "gpt-4o-2024-08-06"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_2024_07_18"] = "gpt-4o-mini-2024-07-18"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO_PREVIEW"] = "gpt-4-turbo-preview"; + E_OPENAI_MODEL["MODEL_O1_MINI"] = "o1-mini"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17"] = "gpt-4o-mini-audio-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_O1_MINI_2024_09_12"] = "o1-mini-2024-09-12"; + E_OPENAI_MODEL["MODEL_GPT_4_0125_PREVIEW"] = "gpt-4-0125-preview"; + E_OPENAI_MODEL["MODEL_O1"] = "o1"; + E_OPENAI_MODEL["MODEL_O1_2024_12_17"] = "o1-2024-12-17"; + E_OPENAI_MODEL["MODEL_O1_PRO"] = "o1-pro"; + E_OPENAI_MODEL["MODEL_O1_PRO_2025_03_19"] = "o1-pro-2025-03-19"; +})(E_OPENAI_MODEL || (E_OPENAI_MODEL = {})); +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQWdFWDtBQWhFRCxXQUFZLGNBQWM7SUFDeEIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsMkZBQXlFLENBQUE7SUFDekUsMkNBQXlCLENBQUE7SUFDekIsaUVBQStDLENBQUE7SUFDL0MsMkdBQXlGLENBQUE7SUFDekYscUZBQW1FLENBQUE7SUFDbkUsaUdBQStFLENBQUE7SUFDL0UsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsMkVBQXlELENBQUE7SUFDekQsbURBQWlDLENBQUE7SUFDakMsMkRBQXlDLENBQUE7SUFDekMsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQsdUNBQXFCLENBQUE7SUFDckIseUVBQXVELENBQUE7SUFDdkQsNkNBQTJCLENBQUE7SUFDM0IsK0VBQTZELENBQUE7SUFDN0QscUVBQW1ELENBQUE7SUFDbkQsdUVBQXFELENBQUE7SUFDckQsbUZBQWlFLENBQUE7SUFDakUsaUZBQStELENBQUE7SUFDL0QsaURBQStCLENBQUE7SUFDL0IsbURBQWlDLENBQUE7SUFDakMsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsdURBQXFDLENBQUE7SUFDckMsK0RBQTZDLENBQUE7SUFDN0MsdUdBQXFGLENBQUE7SUFDckYsK0RBQTZDLENBQUE7SUFDN0MsK0NBQTZCLENBQUE7SUFDN0IsaUVBQStDLENBQUE7SUFDL0MsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsaURBQStCLENBQUE7SUFDL0IsaURBQStCLENBQUE7SUFDL0IsdUVBQXFELENBQUE7SUFDckQsMkRBQXlDLENBQUE7SUFDekMsaUZBQStELENBQUE7SUFDL0QsNkZBQTJFLENBQUE7SUFDM0UseUVBQXVELENBQUE7SUFDdkQsdUNBQXFCLENBQUE7SUFDckIsaUZBQStELENBQUE7SUFDL0QseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbUVBQWlELENBQUE7SUFDakQsMkNBQXlCLENBQUE7SUFDekIscUdBQW1GLENBQUE7SUFDbkYsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsaUNBQWUsQ0FBQTtJQUNmLHVEQUFxQyxDQUFBO0lBQ3JDLHlDQUF1QixDQUFBO0lBQ3ZCLCtEQUE2QyxDQUFBO0FBQy9DLENBQUMsRUFoRVcsY0FBYyxLQUFkLGNBQWMsUUFnRXpCIn0= \ No newline at end of file diff --git a/packages/kbot/dist-in/models/cache/openai.js b/packages/kbot/dist-in/models/cache/openai.js index 48367d36..dfaca302 100644 --- a/packages/kbot/dist-in/models/cache/openai.js +++ b/packages/kbot/dist-in/models/cache/openai.js @@ -1,2 +1,2 @@ -export const models = [{ "id": "gpt-4o-audio-preview-2024-10-01", "object": "model", "created": 1727389042, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview", "object": "model", "created": 1727659998, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-10-01", "object": "model", "created": 1727131766, "owned_by": "system" }, { "id": "dall-e-2", "object": "model", "created": 1698798177, "owned_by": "system" }, { "id": "gpt-4o-2024-08-06", "object": "model", "created": 1722814719, "owned_by": "system" }, { "id": "gpt-4-turbo", "object": "model", "created": 1712361441, "owned_by": "system" }, { "id": "gpt-4-1106-preview", "object": "model", "created": 1698957206, "owned_by": "system" }, { "id": "gpt-4o", "object": "model", "created": 1715367049, "owned_by": "system" }, { "id": "gpt-3.5-turbo", "object": "model", "created": 1677610602, "owned_by": "openai" }, { "id": "gpt-3.5-turbo-0125", "object": "model", "created": 1706048358, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct", "object": "model", "created": 1692901427, "owned_by": "system" }, { "id": "gpt-4-1106-vision-preview", "object": "model", "created": 1711473033, "owned_by": "system" }, { "id": "babbage-002", "object": "model", "created": 1692634615, "owned_by": "system" }, { "id": "whisper-1", "object": "model", "created": 1677532384, "owned_by": "openai-internal" }, { "id": "dall-e-3", "object": "model", "created": 
1698785189, "owned_by": "system" }, { "id": "text-embedding-3-small", "object": "model", "created": 1705948997, "owned_by": "system" }, { "id": "gpt-3.5-turbo-16k", "object": "model", "created": 1683758102, "owned_by": "openai-internal" }, { "id": "gpt-4-0125-preview", "object": "model", "created": 1706037612, "owned_by": "system" }, { "id": "gpt-4-turbo-preview", "object": "model", "created": 1706037777, "owned_by": "system" }, { "id": "chatgpt-4o-latest", "object": "model", "created": 1723515131, "owned_by": "system" }, { "id": "omni-moderation-latest", "object": "model", "created": 1731689265, "owned_by": "system" }, { "id": "gpt-4o-2024-05-13", "object": "model", "created": 1715368132, "owned_by": "system" }, { "id": "o1-preview-2024-09-12", "object": "model", "created": 1725648865, "owned_by": "system" }, { "id": "omni-moderation-2024-09-26", "object": "model", "created": 1732734466, "owned_by": "system" }, { "id": "tts-1-hd-1106", "object": "model", "created": 1699053533, "owned_by": "system" }, { "id": "o1-preview", "object": "model", "created": 1725648897, "owned_by": "system" }, { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }, { "id": "gpt-4-0613", "object": "model", "created": 1686588896, "owned_by": "openai" }, { "id": "tts-1-hd", "object": "model", "created": 1699046015, "owned_by": "system" }, { "id": "gpt-4-vision-preview", "object": "model", "created": 1698894917, "owned_by": "system" }, { "id": "text-embedding-ada-002", "object": "model", "created": 1671217299, "owned_by": "openai-internal" }, { "id": "gpt-3.5-turbo-1106", "object": "model", "created": 1698959748, "owned_by": "system" }, { "id": "gpt-4o-audio-preview", "object": "model", "created": 1727460443, "owned_by": "system" }, { "id": "tts-1", "object": "model", "created": 1681940951, "owned_by": "openai-internal" }, { "id": "tts-1-1106", "object": "model", "created": 1699053241, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct-0914", "object": "model", "created": 1694122472, "owned_by": "system" }, { "id": "davinci-002", "object": "model", "created": 1692634301, "owned_by": "system" }, { "id": "text-embedding-3-large", "object": "model", "created": 1705953180, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-12-17", "object": "model", "created": 1733945430, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview", "object": "model", "created": 1734387380, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview-2024-12-17", "object": "model", "created": 1734112601, "owned_by": "system" }, { "id": "o1-mini", "object": "model", "created": 1725649008, "owned_by": "system" }, { "id": "gpt-4o-2024-11-20", "object": "model", "created": 1731975040, "owned_by": "system" }, { "id": "o1-mini-2024-09-12", "object": "model", "created": 1725648979, "owned_by": "system" }, { "id": "gpt-4o-mini-2024-07-18", "object": "model", "created": 1721172717, "owned_by": "system" }, { "id": "gpt-4o-mini", "object": "model", "created": 1721172741, "owned_by": "system" }, { "id": "gpt-4o-audio-preview-2024-12-17", "object": "model", "created": 1734034239, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview", "object": "model", "created": 1734387424, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview-2024-12-17", "object": "model", "created": 1734115920, "owned_by": "system" }, { "id": "gpt-4-turbo-2024-04-09", "object": "model", "created": 1712601677, "owned_by": "system" }]; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL21vZGVscy9jYWNoZS9vcGVuYWkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFHLENBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyx5QkFBeUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9DQUFvQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxhQUFhLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFFBQVEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGVBQWUsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9CQUFvQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFdBQVcsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0JBQW9CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxxQkFBcUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG1CQUFtQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHVCQUF1QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsNEJBQTRCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxlQUFlLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxPQUFPLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQ
UksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxzQkFBc0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNCQUFzQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsT0FBTyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw2QkFBNkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0NBQW9DLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw4QkFBOEIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHlDQUF5QyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsU0FBUyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsYUFBYSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNDQUFzQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsQ0FBQyxDQUFBIn0= \ No newline at end of file +export const models = [{ "id": "gpt-4o-audio-preview-2024-10-01", "object": "model", "created": 1727389042, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview", "object": "model", "created": 1727659998, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-10-01", "object": "model", "created": 1727131766, "owned_by": "system" }, { "id": "dall-e-2", "object": "model", "created": 1698798177, "owned_by": "system" }, { "id": "gpt-4o-2024-08-06", "object": "model", "created": 1722814719, "owned_by": "system" }, { "id": "gpt-4-turbo", "object": "model", "created": 1712361441, "owned_by": "system" }, { "id": "gpt-4-1106-preview", "object": "model", "created": 1698957206, "owned_by": "system" }, { 
"id": "gpt-4o", "object": "model", "created": 1715367049, "owned_by": "system" }, { "id": "gpt-3.5-turbo", "object": "model", "created": 1677610602, "owned_by": "openai" }, { "id": "gpt-3.5-turbo-0125", "object": "model", "created": 1706048358, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct", "object": "model", "created": 1692901427, "owned_by": "system" }, { "id": "gpt-4-1106-vision-preview", "object": "model", "created": 1711473033, "owned_by": "system" }, { "id": "babbage-002", "object": "model", "created": 1692634615, "owned_by": "system" }, { "id": "whisper-1", "object": "model", "created": 1677532384, "owned_by": "openai-internal" }, { "id": "dall-e-3", "object": "model", "created": 1698785189, "owned_by": "system" }, { "id": "text-embedding-3-small", "object": "model", "created": 1705948997, "owned_by": "system" }, { "id": "gpt-3.5-turbo-16k", "object": "model", "created": 1683758102, "owned_by": "openai-internal" }, { "id": "gpt-4-0125-preview", "object": "model", "created": 1706037612, "owned_by": "system" }, { "id": "gpt-4-turbo-preview", "object": "model", "created": 1706037777, "owned_by": "system" }, { "id": "chatgpt-4o-latest", "object": "model", "created": 1723515131, "owned_by": "system" }, { "id": "omni-moderation-latest", "object": "model", "created": 1731689265, "owned_by": "system" }, { "id": "gpt-4o-2024-05-13", "object": "model", "created": 1715368132, "owned_by": "system" }, { "id": "o1-preview-2024-09-12", "object": "model", "created": 1725648865, "owned_by": "system" }, { "id": "omni-moderation-2024-09-26", "object": "model", "created": 1732734466, "owned_by": "system" }, { "id": "tts-1-hd-1106", "object": "model", "created": 1699053533, "owned_by": "system" }, { "id": "o1-preview", "object": "model", "created": 1725648897, "owned_by": "system" }, { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }, { "id": "gpt-4-0613", "object": "model", "created": 1686588896, "owned_by": "openai" }, { "id": "tts-1-hd", "object": "model", "created": 1699046015, "owned_by": "system" }, { "id": "gpt-4-vision-preview", "object": "model", "created": 1698894917, "owned_by": "system" }, { "id": "text-embedding-ada-002", "object": "model", "created": 1671217299, "owned_by": "openai-internal" }, { "id": "gpt-3.5-turbo-1106", "object": "model", "created": 1698959748, "owned_by": "system" }, { "id": "gpt-4o-audio-preview", "object": "model", "created": 1727460443, "owned_by": "system" }, { "id": "tts-1", "object": "model", "created": 1681940951, "owned_by": "openai-internal" }, { "id": "tts-1-1106", "object": "model", "created": 1699053241, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct-0914", "object": "model", "created": 1694122472, "owned_by": "system" }, { "id": "davinci-002", "object": "model", "created": 1692634301, "owned_by": "system" }, { "id": "text-embedding-3-large", "object": "model", "created": 1705953180, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-12-17", "object": "model", "created": 1733945430, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview", "object": "model", "created": 1734387380, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview-2024-12-17", "object": "model", "created": 1734112601, "owned_by": "system" }, { "id": "o1-mini", "object": "model", "created": 1725649008, "owned_by": "system" }, { "id": "gpt-4o-2024-11-20", "object": "model", "created": 1731975040, "owned_by": "system" }, { "id": "o1-mini-2024-09-12", "object": "model", "created": 1725648979, "owned_by": "system" }, 
{ "id": "gpt-4o-mini-2024-07-18", "object": "model", "created": 1721172717, "owned_by": "system" }, { "id": "gpt-4o-mini", "object": "model", "created": 1721172741, "owned_by": "system" }, { "id": "gpt-4o-audio-preview-2024-12-17", "object": "model", "created": 1734034239, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview", "object": "model", "created": 1734387424, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview-2024-12-17", "object": "model", "created": 1734115920, "owned_by": "system" }, { "id": "gpt-4-turbo-2024-04-09", "object": "model", "created": 1712601677, "owned_by": "" }]; +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL21vZGVscy9jYWNoZS9vcGVuYWkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFHLENBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyx5QkFBeUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9DQUFvQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxhQUFhLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFFBQVEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGVBQWUsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9CQUFvQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFdBQVcsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0JBQW9CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxxQkFBcUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG1CQUFtQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLE
VBQUMsSUFBSSxFQUFDLHVCQUF1QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsNEJBQTRCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxlQUFlLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxPQUFPLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxzQkFBc0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNCQUFzQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsT0FBTyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw2QkFBNkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0NBQW9DLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw4QkFBOEIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHlDQUF5QyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsU0FBUyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsYUFBYSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNDQUFzQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxFQUFFLEVBQUMsQ0FBQyxDQUFBIn0= \ No newline at end of file diff --git a/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts 
b/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts new file mode 100644 index 00000000..6e675dbb --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts @@ -0,0 +1,59 @@ +export declare enum E_OPENROUTER_MODEL_FREE { + MODEL_FREE_DEEPSEEK_DEEPSEEK_V3_BASE_FREE = "deepseek/deepseek-v3-base:free", + MODEL_FREE_ALLENAI_MOLMO_7B_D_FREE = "allenai/molmo-7b-d:free", + MODEL_FREE_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE = "bytedance-research/ui-tars-72b:free", + MODEL_FREE_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE = "qwen/qwen2.5-vl-3b-instruct:free", + MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE = "google/gemini-2.5-pro-exp-03-25:free", + MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", + MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free", + MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", + MODEL_FREE_OPEN_R1_OLYMPICCODER_7B_FREE = "open-r1/olympiccoder-7b:free", + MODEL_FREE_OPEN_R1_OLYMPICCODER_32B_FREE = "open-r1/olympiccoder-32b:free", + MODEL_FREE_GOOGLE_GEMMA_3_1B_IT_FREE = "google/gemma-3-1b-it:free", + MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", + MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", + MODEL_FREE_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free", + MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE = "deepseek/deepseek-r1-zero:free", + MODEL_FREE_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free", + MODEL_FREE_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE = "moonshotai/moonlight-16b-a3b-instruct:free", + MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free", + MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", + MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE = "google/gemini-2.0-pro-exp-02-05:free", + MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free", + MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE = "deepseek/deepseek-r1-distill-qwen-32b:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE = "google/gemini-2.0-flash-thinking-exp:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free", + MODEL_FREE_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE = "sophosympatheia/rogue-rose-103b-v0.2:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_FREE = "deepseek/deepseek-chat:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE = "google/gemini-2.0-flash-thinking-exp-1219:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", + MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", + MODEL_FREE_QWEN_QWQ_32B_PREVIEW_FREE = "qwen/qwq-32b-preview:free", + MODEL_FREE_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE = "google/learnlm-1.5-pro-experimental:free", + 
MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", + MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE = "nvidia/llama-3.1-nemotron-70b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", + MODEL_FREE_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", + MODEL_FREE_GOOGLE_GEMINI_FLASH_1_5_8B_EXP = "google/gemini-flash-1.5-8b-exp", + MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", + MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", + MODEL_FREE_QWEN_QWEN_2_7B_INSTRUCT_FREE = "qwen/qwen-2-7b-instruct:free", + MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", + MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", + MODEL_FREE_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE = "microsoft/phi-3-mini-128k-instruct:free", + MODEL_FREE_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE = "microsoft/phi-3-medium-128k-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE = "meta-llama/llama-3-8b-instruct:free", + MODEL_FREE_OPENCHAT_OPENCHAT_7B_FREE = "openchat/openchat-7b:free", + MODEL_FREE_UNDI95_TOPPY_M_7B_FREE = "undi95/toppy-m-7b:free", + MODEL_FREE_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE = "huggingfaceh4/zephyr-7b-beta:free", + MODEL_FREE_GRYPHE_MYTHOMAX_L2_13B_FREE = "gryphe/mythomax-l2-13b:free" +} diff --git a/packages/kbot/dist-in/models/cache/openrouter-models-free.js b/packages/kbot/dist-in/models/cache/openrouter-models-free.js new file mode 100644 index 00000000..c29765a8 --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openrouter-models-free.js @@ -0,0 +1,61 @@ +export var E_OPENROUTER_MODEL_FREE; +(function (E_OPENROUTER_MODEL_FREE) { + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_V3_BASE_FREE"] = "deepseek/deepseek-v3-base:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_ALLENAI_MOLMO_7B_D_FREE"] = "allenai/molmo-7b-d:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE"] = "bytedance-research/ui-tars-72b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE"] = "google/gemini-2.5-pro-exp-03-25:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE"] = "featherless/qwerky-72b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPEN_R1_OLYMPICCODER_7B_FREE"] = "open-r1/olympiccoder-7b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPEN_R1_OLYMPICCODER_32B_FREE"] = "open-r1/olympiccoder-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_1B_IT_FREE"] = "google/gemma-3-1b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; + 
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_REKAAI_REKA_FLASH_3_FREE"] = "rekaai/reka-flash-3:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE"] = "google/gemma-3-27b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE"] = "deepseek/deepseek-r1-zero:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWQ_32B_FREE"] = "qwen/qwq-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE"] = "moonshotai/moonlight-16b-a3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE"] = "nousresearch/deephermes-3-llama-3-8b-preview:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-mistral-24b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE"] = "google/gemini-2.0-pro-exp-02-05:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-72b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE"] = "mistralai/mistral-small-24b-instruct-2501:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE"] = "deepseek/deepseek-r1-distill-qwen-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE"] = "deepseek/deepseek-r1-distill-qwen-14b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE"] = "deepseek/deepseek-r1-distill-llama-70b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE"] = "google/gemini-2.0-flash-thinking-exp:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE"] = "deepseek/deepseek-r1:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE"] = "sophosympatheia/rogue-rose-103b-v0.2:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_FREE"] = "deepseek/deepseek-chat:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE"] = "google/gemini-2.0-flash-thinking-exp-1219:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE"] = "google/gemini-2.0-flash-exp:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-70b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWQ_32B_PREVIEW_FREE"] = "qwen/qwq-32b-preview:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE"] = "google/learnlm-1.5-pro-experimental:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE"] = "nvidia/llama-3.1-nemotron-70b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-1b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE"] = "meta-llama/llama-3.2-11b-vision-instruct:free"; + 
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE"] = "qwen/qwen-2.5-vl-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_FLASH_1_5_8B_EXP"] = "google/gemini-flash-1.5-8b-exp"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-8b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_7B_INSTRUCT_FREE"] = "qwen/qwen-2-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE"] = "microsoft/phi-3-mini-128k-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE"] = "microsoft/phi-3-medium-128k-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE"] = "meta-llama/llama-3-8b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENCHAT_OPENCHAT_7B_FREE"] = "openchat/openchat-7b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_UNDI95_TOPPY_M_7B_FREE"] = "undi95/toppy-m-7b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE"] = "huggingfaceh4/zephyr-7b-beta:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GRYPHE_MYTHOMAX_L2_13B_FREE"] = "gryphe/mythomax-l2-13b:free"; +})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {})); +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkEwRFg7QUExREQsV0FBWSx1QkFBdUI7SUFDakMsdUdBQTRFLENBQUE7SUFDNUUseUZBQThELENBQUE7SUFDOUQsaUhBQXNGLENBQUE7SUFDdEYsMkdBQWdGLENBQUE7SUFDaEYsbUhBQXdGLENBQUE7SUFDeEYsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYsaUdBQXNFLENBQUE7SUFDdEUscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUscUdBQTBFLENBQUE7SUFDMUUsNkZBQWtFLENBQUE7SUFDbEUsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsdUdBQTRFLENBQUE7SUFDNUUsNkVBQWtELENBQUE7SUFDbEQsK0hBQW9HLENBQUE7SUFDcEcsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsbUhBQXdGLENBQUE7SUFDeEYsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkhBQWtHLENBQUE7SUFDbEcsNkZBQWtFLENBQUE7SUFDbEUsNkhBQWtHLENBQUE7SUFDbEcsaUdBQXNFLENBQUE7SUFDdEUsdUlBQTRHLENBQUE7SUFDNUcsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYsNkZBQWtFLENBQUE7SUFDbEUsMkhBQWdHLENBQUE7SUFDaEcscUhBQTBGLENBQUE7SUFDMUYsaUlBQXNHLENBQUE7SUFDdEcscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYscUlBQTBHLENBQUE7SUFDMUcseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsdUdBQTRFLENBQUE7SUFDNUUscUhBQTBGLENBQUE7SUFDMUYsaUdBQXNFLENBQUE7SUFDdEUsbUdBQXdFLENBQUE7SUFDeEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7SUFDcEYseUhBQThGLENBQUE7SUFDOUYsNkhBQWtHLENBQUE7SUFDbEcsaUhBQXNGLENBQUE7SUFDdEYsNkZBQWtFLENBQUE7SUFDbEUsdUZBQTRELENBQUE7SUFDNUQsNkdBQWtGLENBQUE7SUFDbEYsaUdBQXNFLENBQUE7QUFDeEUsQ0FBQyxFQTFEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBMERsQyJ9 \ No newline at end of file diff --git a/packages/kbot/dist-in/models/cache/openrouter-models.d.ts 
b/packages/kbot/dist-in/models/cache/openrouter-models.d.ts new file mode 100644 index 00000000..82ad6912 --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openrouter-models.d.ts @@ -0,0 +1,292 @@ +export declare enum E_OPENROUTER_MODEL { + MODEL_MISTRAL_MINISTRAL_8B = "mistral/ministral-8b", + MODEL_DEEPSEEK_DEEPSEEK_V3_BASE_FREE = "deepseek/deepseek-v3-base:free", + MODEL_SCB10X_LLAMA3_1_TYPHOON2_8B_INSTRUCT = "scb10x/llama3.1-typhoon2-8b-instruct", + MODEL_SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT = "scb10x/llama3.1-typhoon2-70b-instruct", + MODEL_ALLENAI_MOLMO_7B_D_FREE = "allenai/molmo-7b-d:free", + MODEL_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE = "bytedance-research/ui-tars-72b:free", + MODEL_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE = "qwen/qwen2.5-vl-3b-instruct:free", + MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE = "google/gemini-2.5-pro-exp-03-25:free", + MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = "deepseek/deepseek-chat-v3-0324", + MODEL_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free", + MODEL_OPENAI_O1_PRO = "openai/o1-pro", + MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", + MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct", + MODEL_OPEN_R1_OLYMPICCODER_7B_FREE = "open-r1/olympiccoder-7b:free", + MODEL_OPEN_R1_OLYMPICCODER_32B_FREE = "open-r1/olympiccoder-32b:free", + MODEL_STEELSKULL_L3_3_ELECTRA_R1_70B = "steelskull/l3.3-electra-r1-70b", + MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct", + MODEL_GOOGLE_GEMMA_3_1B_IT_FREE = "google/gemma-3-1b-it:free", + MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", + MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it", + MODEL_AI21_JAMBA_1_6_LARGE = "ai21/jamba-1.6-large", + MODEL_AI21_JAMBA_1_6_MINI = "ai21/jamba-1.6-mini", + MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", + MODEL_GOOGLE_GEMMA_3_12B_IT = "google/gemma-3-12b-it", + MODEL_COHERE_COMMAND_A = "cohere/command-a", + MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW = "openai/gpt-4o-mini-search-preview", + MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview", + MODEL_TOKYOTECH_LLM_LLAMA_3_1_SWALLOW_70B_INSTRUCT_V0_3 = "tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3", + MODEL_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free", + MODEL_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free", + MODEL_GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it", + MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1", + MODEL_LATITUDEGAMES_WAYFARER_LARGE_70B_LLAMA_3_3 = "latitudegames/wayfarer-large-70b-llama-3.3", + MODEL_THEDRUMMER_SKYFALL_36B_V2 = "thedrummer/skyfall-36b-v2", + MODEL_MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT = "microsoft/phi-4-multimodal-instruct", + MODEL_PERPLEXITY_SONAR_REASONING_PRO = "perplexity/sonar-reasoning-pro", + MODEL_PERPLEXITY_SONAR_PRO = "perplexity/sonar-pro", + MODEL_PERPLEXITY_SONAR_DEEP_RESEARCH = "perplexity/sonar-deep-research", + MODEL_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE = "deepseek/deepseek-r1-zero:free", + MODEL_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free", + MODEL_QWEN_QWQ_32B = "qwen/qwq-32b", + MODEL_QWEN_QWEN2_5_32B_INSTRUCT = "qwen/qwen2.5-32b-instruct", + MODEL_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE = "moonshotai/moonlight-16b-a3b-instruct:free", + MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = 
"nousresearch/deephermes-3-llama-3-8b-preview:free", + MODEL_OPENAI_GPT_4_5_PREVIEW = "openai/gpt-4.5-preview", + MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking", + MODEL_PERPLEXITY_R1_1776 = "perplexity/r1-1776", + MODEL_MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free", + MODEL_META_LLAMA_LLAMA_GUARD_3_8B = "meta-llama/llama-guard-3-8b", + MODEL_OPENAI_O3_MINI_HIGH = "openai/o3-mini-high", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B = "deepseek/deepseek-r1-distill-llama-8b", + MODEL_GOOGLE_GEMINI_2_0_FLASH_001 = "google/gemini-2.0-flash-001", + MODEL_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE = "google/gemini-2.0-pro-exp-02-05:free", + MODEL_QWEN_QWEN_VL_PLUS = "qwen/qwen-vl-plus", + MODEL_AION_LABS_AION_1_0 = "aion-labs/aion-1.0", + MODEL_AION_LABS_AION_1_0_MINI = "aion-labs/aion-1.0-mini", + MODEL_AION_LABS_AION_RP_LLAMA_3_1_8B = "aion-labs/aion-rp-llama-3.1-8b", + MODEL_QWEN_QWEN_VL_MAX = "qwen/qwen-vl-max", + MODEL_QWEN_QWEN_TURBO = "qwen/qwen-turbo", + MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free", + MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT = "qwen/qwen2.5-vl-72b-instruct", + MODEL_QWEN_QWEN_PLUS = "qwen/qwen-plus", + MODEL_QWEN_QWEN_MAX = "qwen/qwen-max", + MODEL_OPENAI_O3_MINI = "openai/o3-mini", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B = "deepseek/deepseek-r1-distill-qwen-1.5b", + MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free", + MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501 = "mistralai/mistral-small-24b-instruct-2501", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE = "deepseek/deepseek-r1-distill-qwen-32b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B = "deepseek/deepseek-r1-distill-qwen-32b", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B = "deepseek/deepseek-r1-distill-qwen-14b", + MODEL_PERPLEXITY_SONAR_REASONING = "perplexity/sonar-reasoning", + MODEL_PERPLEXITY_SONAR = "perplexity/sonar", + MODEL_LIQUID_LFM_7B = "liquid/lfm-7b", + MODEL_LIQUID_LFM_3B = "liquid/lfm-3b", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B = "deepseek/deepseek-r1-distill-llama-70b", + MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE = "google/gemini-2.0-flash-thinking-exp:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free", + MODEL_DEEPSEEK_DEEPSEEK_R1 = "deepseek/deepseek-r1", + MODEL_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE = "sophosympatheia/rogue-rose-103b-v0.2:free", + MODEL_MINIMAX_MINIMAX_01 = "minimax/minimax-01", + MODEL_MISTRALAI_CODESTRAL_2501 = "mistralai/codestral-2501", + MODEL_MICROSOFT_PHI_4 = "microsoft/phi-4", + MODEL_SAO10K_L3_1_70B_HANAMI_X1 = "sao10k/l3.1-70b-hanami-x1", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_FREE = "deepseek/deepseek-chat:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT = "deepseek/deepseek-chat", + MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE = 
"google/gemini-2.0-flash-thinking-exp-1219:free", + MODEL_SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b", + MODEL_OPENAI_O1 = "openai/o1", + MODEL_EVA_UNIT_01_EVA_LLAMA_3_33_70B = "eva-unit-01/eva-llama-3.33-70b", + MODEL_X_AI_GROK_2_VISION_1212 = "x-ai/grok-2-vision-1212", + MODEL_X_AI_GROK_2_1212 = "x-ai/grok-2-1212", + MODEL_COHERE_COMMAND_R7B_12_2024 = "cohere/command-r7b-12-2024", + MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", + MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT = "meta-llama/llama-3.3-70b-instruct", + MODEL_AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1", + MODEL_AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1", + MODEL_AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1", + MODEL_QWEN_QWQ_32B_PREVIEW_FREE = "qwen/qwq-32b-preview:free", + MODEL_QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview", + MODEL_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE = "google/learnlm-1.5-pro-experimental:free", + MODEL_EVA_UNIT_01_EVA_QWEN_2_5_72B = "eva-unit-01/eva-qwen-2.5-72b", + MODEL_OPENAI_GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20", + MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411", + MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407", + MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411", + MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta", + MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b", + MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", + MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct", + MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", + MODEL_EVA_UNIT_01_EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b", + MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", + MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b", + MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", + MODEL_X_AI_GROK_BETA = "x-ai/grok-beta", + MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", + MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", + MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", + MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE = "nvidia/llama-3.1-nemotron-70b-instruct:free", + MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", + MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", + MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", + MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", + MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", + MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", + MODEL_LIQUID_LFM_40B = "liquid/lfm-40b", + MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + 
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", + MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", + MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", + MODEL_QWEN_QWEN_2_5_VL_72B_INSTRUCT = "qwen/qwen-2.5-vl-72b-instruct", + MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", + MODEL_OPENAI_O1_PREVIEW = "openai/o1-preview", + MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12", + MODEL_OPENAI_O1_MINI = "openai/o1-mini", + MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", + MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", + MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", + MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", + MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", + MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", + MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", + MODEL_GOOGLE_GEMINI_FLASH_1_5_8B_EXP = "google/gemini-flash-1.5-8b-exp", + MODEL_AI21_JAMBA_1_5_MINI = "ai21/jamba-1-5-mini", + MODEL_AI21_JAMBA_1_5_LARGE = "ai21/jamba-1-5-large", + MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", + MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b", + MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b", + MODEL_OPENAI_CHATGPT_4O_LATEST = "openai/chatgpt-4o-latest", + MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", + MODEL_AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b", + MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", + MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b", + MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE = "perplexity/llama-3.1-sonar-small-128k-online", + MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = "perplexity/llama-3.1-sonar-large-128k-online", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", + MODEL_MISTRALAI_CODESTRAL_MAMBA = "mistralai/codestral-mamba", + MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", + MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", + MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", + MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", + MODEL_QWEN_QWEN_2_7B_INSTRUCT_FREE = "qwen/qwen-2-7b-instruct:free", + MODEL_QWEN_QWEN_2_7B_INSTRUCT = "qwen/qwen-2-7b-instruct", + MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it", + MODEL_ALPINDALE_MAGNUM_72B = "alpindale/magnum-72b", + MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", + MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it", + MODEL_01_AI_YI_LARGE = "01-ai/yi-large", + MODEL_AI21_JAMBA_INSTRUCT = "ai21/jamba-instruct", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA = "anthropic/claude-3.5-sonnet-20240620:beta", + 
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620", + MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", + MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", + MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE = "microsoft/phi-3-mini-128k-instruct:free", + MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", + MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE = "microsoft/phi-3-medium-128k-instruct:free", + MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", + MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5", + MODEL_OPENAI_GPT_4O = "openai/gpt-4o", + MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended", + MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", + MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B_EXTENDED = "neversleep/llama-3-lumimaid-8b:extended", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B = "neversleep/llama-3-lumimaid-8b", + MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2", + MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE = "meta-llama/llama-3-8b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct", + MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct", + MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b", + MODEL_MICROSOFT_WIZARDLM_2_7B = "microsoft/wizardlm-2-7b", + MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5", + MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", + MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", + MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024", + MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", + MODEL_COHERE_COMMAND = "cohere/command", + MODEL_COHERE_COMMAND_R = "cohere/command-r", + MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta", + MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", + MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta", + MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", + MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA = "anthropic/claude-3-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet", + MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", + MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", + MODEL_GOOGLE_GEMMA_7B_IT = "google/gemma-7b-it", + MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", + MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", + MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", + MODEL_MISTRALAI_MISTRAL_MEDIUM = "mistralai/mistral-medium", + MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", + MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2", + 
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X7B = "cognitivecomputations/dolphin-mixtral-8x7b", + MODEL_GOOGLE_GEMINI_PRO_VISION = "google/gemini-pro-vision", + MODEL_GOOGLE_GEMINI_PRO = "google/gemini-pro", + MODEL_MISTRALAI_MIXTRAL_8X7B = "mistralai/mixtral-8x7b", + MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", + MODEL_OPENCHAT_OPENCHAT_7B_FREE = "openchat/openchat-7b:free", + MODEL_OPENCHAT_OPENCHAT_7B = "openchat/openchat-7b", + MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", + MODEL_ANTHROPIC_CLAUDE_2_1_BETA = "anthropic/claude-2.1:beta", + MODEL_ANTHROPIC_CLAUDE_2_1 = "anthropic/claude-2.1", + MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta", + MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2", + MODEL_TEKNIUM_OPENHERMES_2_5_MISTRAL_7B = "teknium/openhermes-2.5-mistral-7b", + MODEL_UNDI95_TOPPY_M_7B_FREE = "undi95/toppy-m-7b:free", + MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b", + MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", + MODEL_OPENROUTER_AUTO = "openrouter/auto", + MODEL_OPENAI_GPT_3_5_TURBO_1106 = "openai/gpt-3.5-turbo-1106", + MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview", + MODEL_GOOGLE_PALM_2_CHAT_BISON_32K = "google/palm-2-chat-bison-32k", + MODEL_GOOGLE_PALM_2_CODECHAT_BISON_32K = "google/palm-2-codechat-bison-32k", + MODEL_JONDURBIN_AIROBOROS_L2_70B = "jondurbin/airoboros-l2-70b", + MODEL_XWIN_LM_XWIN_LM_70B = "xwin-lm/xwin-lm-70b", + MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", + MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b", + MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k", + MODEL_OPENAI_GPT_4_32K = "openai/gpt-4-32k", + MODEL_OPENAI_GPT_4_32K_0314 = "openai/gpt-4-32k-0314", + MODEL_NOUSRESEARCH_NOUS_HERMES_LLAMA2_13B = "nousresearch/nous-hermes-llama2-13b", + MODEL_MANCER_WEAVER = "mancer/weaver", + MODEL_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE = "huggingfaceh4/zephyr-7b-beta:free", + MODEL_ANTHROPIC_CLAUDE_2_0_BETA = "anthropic/claude-2.0:beta", + MODEL_ANTHROPIC_CLAUDE_2_0 = "anthropic/claude-2.0", + MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b", + MODEL_GOOGLE_PALM_2_CHAT_BISON = "google/palm-2-chat-bison", + MODEL_GOOGLE_PALM_2_CODECHAT_BISON = "google/palm-2-codechat-bison", + MODEL_GRYPHE_MYTHOMAX_L2_13B_FREE = "gryphe/mythomax-l2-13b:free", + MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b", + MODEL_META_LLAMA_LLAMA_2_13B_CHAT = "meta-llama/llama-2-13b-chat", + MODEL_META_LLAMA_LLAMA_2_70B_CHAT = "meta-llama/llama-2-70b-chat", + MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo", + MODEL_OPENAI_GPT_3_5_TURBO_0125 = "openai/gpt-3.5-turbo-0125", + MODEL_OPENAI_GPT_4 = "openai/gpt-4", + MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314" +} diff --git a/packages/kbot/dist-in/models/cache/openrouter-models.js b/packages/kbot/dist-in/models/cache/openrouter-models.js new file mode 100644 index 00000000..2a4f135f --- /dev/null +++ b/packages/kbot/dist-in/models/cache/openrouter-models.js @@ -0,0 +1,294 @@ +export var E_OPENROUTER_MODEL; +(function (E_OPENROUTER_MODEL) { + E_OPENROUTER_MODEL["MODEL_MISTRAL_MINISTRAL_8B"] = "mistral/ministral-8b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_V3_BASE_FREE"] = "deepseek/deepseek-v3-base:free"; + E_OPENROUTER_MODEL["MODEL_SCB10X_LLAMA3_1_TYPHOON2_8B_INSTRUCT"] = "scb10x/llama3.1-typhoon2-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT"] = 
"scb10x/llama3.1-typhoon2-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_MOLMO_7B_D_FREE"] = "allenai/molmo-7b-d:free"; + E_OPENROUTER_MODEL["MODEL_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE"] = "bytedance-research/ui-tars-72b:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE"] = "google/gemini-2.5-pro-exp-03-25:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324"] = "deepseek/deepseek-chat-v3-0324"; + E_OPENROUTER_MODEL["MODEL_FEATHERLESS_QWERKY_72B_FREE"] = "featherless/qwerky-72b:free"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PRO"] = "openai/o1-pro"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT"] = "mistralai/mistral-small-3.1-24b-instruct"; + E_OPENROUTER_MODEL["MODEL_OPEN_R1_OLYMPICCODER_7B_FREE"] = "open-r1/olympiccoder-7b:free"; + E_OPENROUTER_MODEL["MODEL_OPEN_R1_OLYMPICCODER_32B_FREE"] = "open-r1/olympiccoder-32b:free"; + E_OPENROUTER_MODEL["MODEL_STEELSKULL_L3_3_ELECTRA_R1_70B"] = "steelskull/l3.3-electra-r1-70b"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT"] = "allenai/olmo-2-0325-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_1B_IT_FREE"] = "google/gemma-3-1b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT"] = "google/gemma-3-4b-it"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_6_LARGE"] = "ai21/jamba-1.6-large"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_6_MINI"] = "ai21/jamba-1.6-mini"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT"] = "google/gemma-3-12b-it"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_A"] = "cohere/command-a"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW"] = "openai/gpt-4o-mini-search-preview"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW"] = "openai/gpt-4o-search-preview"; + E_OPENROUTER_MODEL["MODEL_TOKYOTECH_LLM_LLAMA_3_1_SWALLOW_70B_INSTRUCT_V0_3"] = "tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3"; + E_OPENROUTER_MODEL["MODEL_REKAAI_REKA_FLASH_3_FREE"] = "rekaai/reka-flash-3:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_27B_IT_FREE"] = "google/gemma-3-27b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_27B_IT"] = "google/gemma-3-27b-it"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1"] = "thedrummer/anubis-pro-105b-v1"; + E_OPENROUTER_MODEL["MODEL_LATITUDEGAMES_WAYFARER_LARGE_70B_LLAMA_3_3"] = "latitudegames/wayfarer-large-70b-llama-3.3"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_SKYFALL_36B_V2"] = "thedrummer/skyfall-36b-v2"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT"] = "microsoft/phi-4-multimodal-instruct"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_REASONING_PRO"] = "perplexity/sonar-reasoning-pro"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_PRO"] = "perplexity/sonar-pro"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_DEEP_RESEARCH"] = "perplexity/sonar-deep-research"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE"] = "deepseek/deepseek-r1-zero:free"; + 
E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_FREE"] = "qwen/qwq-32b:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B"] = "qwen/qwq-32b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_32B_INSTRUCT"] = "qwen/qwen2.5-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE"] = "moonshotai/moonlight-16b-a3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE"] = "nousresearch/deephermes-3-llama-3-8b-preview:free"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_5_PREVIEW"] = "openai/gpt-4.5-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001"] = "google/gemini-2.0-flash-lite-001"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA"] = "anthropic/claude-3.7-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET"] = "anthropic/claude-3.7-sonnet"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING"] = "anthropic/claude-3.7-sonnet:thinking"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_R1_1776"] = "perplexity/r1-1776"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SABA"] = "mistralai/mistral-saba"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-mistral-24b:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_GUARD_3_8B"] = "meta-llama/llama-guard-3-8b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O3_MINI_HIGH"] = "openai/o3-mini-high"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B"] = "deepseek/deepseek-r1-distill-llama-8b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_001"] = "google/gemini-2.0-flash-001"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE"] = "google/gemini-2.0-pro-exp-02-05:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_VL_PLUS"] = "qwen/qwen-vl-plus"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_1_0"] = "aion-labs/aion-1.0"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_1_0_MINI"] = "aion-labs/aion-1.0-mini"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_RP_LLAMA_3_1_8B"] = "aion-labs/aion-rp-llama-3.1-8b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_VL_MAX"] = "qwen/qwen-vl-max"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_TURBO"] = "qwen/qwen-turbo"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-72b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT"] = "qwen/qwen2.5-vl-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS"] = "qwen/qwen-plus"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_MAX"] = "qwen/qwen-max"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O3_MINI"] = "openai/o3-mini"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B"] = "deepseek/deepseek-r1-distill-qwen-1.5b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE"] = "mistralai/mistral-small-24b-instruct-2501:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501"] = "mistralai/mistral-small-24b-instruct-2501"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE"] = "deepseek/deepseek-r1-distill-qwen-32b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B"] = "deepseek/deepseek-r1-distill-qwen-32b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE"] = "deepseek/deepseek-r1-distill-qwen-14b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B"] = "deepseek/deepseek-r1-distill-qwen-14b"; + 
E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_REASONING"] = "perplexity/sonar-reasoning"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR"] = "perplexity/sonar"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_7B"] = "liquid/lfm-7b"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_3B"] = "liquid/lfm-3b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE"] = "deepseek/deepseek-r1-distill-llama-70b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B"] = "deepseek/deepseek-r1-distill-llama-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE"] = "google/gemini-2.0-flash-thinking-exp:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_FREE"] = "deepseek/deepseek-r1:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1"] = "deepseek/deepseek-r1"; + E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE"] = "sophosympatheia/rogue-rose-103b-v0.2:free"; + E_OPENROUTER_MODEL["MODEL_MINIMAX_MINIMAX_01"] = "minimax/minimax-01"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_CODESTRAL_2501"] = "mistralai/codestral-2501"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_4"] = "microsoft/phi-4"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_70B_HANAMI_X1"] = "sao10k/l3.1-70b-hanami-x1"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_FREE"] = "deepseek/deepseek-chat:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT"] = "deepseek/deepseek-chat"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE"] = "google/gemini-2.0-flash-thinking-exp-1219:free"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_3_EURYALE_70B"] = "sao10k/l3.3-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1"] = "openai/o1"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_LLAMA_3_33_70B"] = "eva-unit-01/eva-llama-3.33-70b"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_2_VISION_1212"] = "x-ai/grok-2-vision-1212"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_2_1212"] = "x-ai/grok-2-1212"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R7B_12_2024"] = "cohere/command-r7b-12-2024"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE"] = "google/gemini-2.0-flash-exp:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-70b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT"] = "meta-llama/llama-3.3-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_LITE_V1"] = "amazon/nova-lite-v1"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_MICRO_V1"] = "amazon/nova-micro-v1"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_PRO_V1"] = "amazon/nova-pro-v1"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_PREVIEW_FREE"] = "qwen/qwq-32b-preview:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_PREVIEW"] = "qwen/qwq-32b-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE"] = "google/learnlm-1.5-pro-experimental:free"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_QWEN_2_5_72B"] = "eva-unit-01/eva-qwen-2.5-72b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_11_20"] = "openai/gpt-4o-2024-11-20"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2411"] = "mistralai/mistral-large-2411"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2407"] = "mistralai/mistral-large-2407"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_LARGE_2411"] = "mistralai/pixtral-large-2411"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_VISION_BETA"] = "x-ai/grok-vision-beta"; + E_OPENROUTER_MODEL["MODEL_INFERMATIC_MN_INFEROR_12B"] = "infermatic/mn-inferor-12b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = 
"qwen/qwen-2.5-coder-32b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT"] = "qwen/qwen-2.5-coder-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_RAIFLE_SORCERERLM_8X22B"] = "raifle/sorcererlm-8x22b"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_QWEN_2_5_32B"] = "eva-unit-01/eva-qwen-2.5-32b"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_UNSLOPNEMO_12B"] = "thedrummer/unslopnemo-12b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA"] = "anthropic/claude-3.5-haiku:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU"] = "anthropic/claude-3.5-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA"] = "anthropic/claude-3.5-haiku-20241022:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B"] = "neversleep/llama-3.1-lumimaid-70b"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B"] = "anthracite-org/magnum-v4-72b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA"] = "anthropic/claude-3.5-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET"] = "anthropic/claude-3.5-sonnet"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_BETA"] = "x-ai/grok-beta"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_8B"] = "mistralai/ministral-8b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_3B"] = "mistralai/ministral-3b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_7B_INSTRUCT"] = "qwen/qwen-2.5-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE"] = "nvidia/llama-3.1-nemotron-70b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT"] = "nvidia/llama-3.1-nemotron-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PI"] = "inflection/inflection-3-pi"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B"] = "google/gemini-flash-1.5-8b"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ROCINANTE_12B"] = "thedrummer/rocinante-12b"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_40B"] = "liquid/lfm-40b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT"] = "meta-llama/llama-3.2-3b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-1b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT"] = "meta-llama/llama-3.2-1b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-90b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE"] = "meta-llama/llama-3.2-11b-vision-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-11b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT"] = "qwen/qwen-2.5-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_72B_INSTRUCT"] = "qwen/qwen-2.5-vl-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B"] = "neversleep/llama-3.1-lumimaid-8b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PREVIEW"] = "openai/o1-preview"; + 
E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PREVIEW_2024_09_12"] = "openai/o1-preview-2024-09-12"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI"] = "openai/o1-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_12B"] = "mistralai/pixtral-12b"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_08_2024"] = "cohere/command-r-plus-08-2024"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE"] = "qwen/qwen-2.5-vl-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT"] = "qwen/qwen-2.5-vl-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_EURYALE_70B"] = "sao10k/l3.1-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B_EXP"] = "google/gemini-flash-1.5-8b-exp"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_5_MINI"] = "ai21/jamba-1-5-mini"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_5_LARGE"] = "ai21/jamba-1-5-large"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT"] = "microsoft/phi-3.5-mini-128k-instruct"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B"] = "nousresearch/hermes-3-llama-3.1-70b"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B"] = "nousresearch/hermes-3-llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_CHATGPT_4O_LATEST"] = "openai/chatgpt-4o-latest"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_LUNARIS_8B"] = "sao10k/l3-lunaris-8b"; + E_OPENROUTER_MODEL["MODEL_AETHERWIING_MN_STARCANNON_12B"] = "aetherwiing/mn-starcannon-12b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_08_06"] = "openai/gpt-4o-2024-08-06"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B"] = "meta-llama/llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_NOTHINGIISREAL_MN_CELESTE_12B"] = "nothingiisreal/mn-celeste-12b"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE"] = "perplexity/llama-3.1-sonar-small-128k-online"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE"] = "perplexity/llama-3.1-sonar-large-128k-online"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-8b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT"] = "meta-llama/llama-3.1-405b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT"] = "meta-llama/llama-3.1-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_CODESTRAL_MAMBA"] = "mistralai/codestral-mamba"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO"] = "mistralai/mistral-nemo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI"] = "openai/gpt-4o-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI_2024_07_18"] = "openai/gpt-4o-mini-2024-07-18"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_7B_INSTRUCT_FREE"] = "qwen/qwen-2-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_7B_INSTRUCT"] = "qwen/qwen-2-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_27B_IT"] = "google/gemma-2-27b-it"; + E_OPENROUTER_MODEL["MODEL_ALPINDALE_MAGNUM_72B"] = "alpindale/magnum-72b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_9B_IT"] = "google/gemma-2-9b-it"; + E_OPENROUTER_MODEL["MODEL_01_AI_YI_LARGE"] = "01-ai/yi-large"; + 
E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_INSTRUCT"] = "ai21/jamba-instruct"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA"] = "anthropic/claude-3.5-sonnet-20240620:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620"] = "anthropic/claude-3.5-sonnet-20240620"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_EURYALE_70B"] = "sao10k/l3-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B"] = "cognitivecomputations/dolphin-mixtral-8x22b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_72B_INSTRUCT"] = "qwen/qwen-2-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT"] = "mistralai/mistral-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = "mistralai/mistral-7b-instruct-v0.3"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B"] = "nousresearch/hermes-2-pro-llama-3-8b"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE"] = "microsoft/phi-3-mini-128k-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT"] = "microsoft/phi-3-mini-128k-instruct"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE"] = "microsoft/phi-3-medium-128k-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT"] = "microsoft/phi-3-medium-128k-instruct"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B"] = "neversleep/llama-3-lumimaid-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5"] = "google/gemini-flash-1.5"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O"] = "openai/gpt-4o"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_EXTENDED"] = "openai/gpt-4o:extended"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_05_13"] = "openai/gpt-4o-2024-05-13"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_GUARD_2_8B"] = "meta-llama/llama-guard-2-8b"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B_EXTENDED"] = "neversleep/llama-3-lumimaid-8b:extended"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B"] = "neversleep/llama-3-lumimaid-8b"; + E_OPENROUTER_MODEL["MODEL_SAO10K_FIMBULVETR_11B_V2"] = "sao10k/fimbulvetr-11b-v2"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE"] = "meta-llama/llama-3-8b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT"] = "meta-llama/llama-3-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT"] = "meta-llama/llama-3-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT"] = "mistralai/mixtral-8x22b-instruct"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_WIZARDLM_2_8X22B"] = "microsoft/wizardlm-2-8x22b"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_WIZARDLM_2_7B"] = "microsoft/wizardlm-2-7b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO_1_5"] = "google/gemini-pro-1.5"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO"] = "openai/gpt-4-turbo"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS"] = "cohere/command-r-plus"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_04_2024"] = "cohere/command-r-plus-04-2024"; + E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B"] = "sophosympatheia/midnight-rose-70b"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND"] = "cohere/command"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA"] = "anthropic/claude-3-haiku:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU"] = 
"anthropic/claude-3-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA"] = "anthropic/claude-3-opus:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS"] = "anthropic/claude-3-opus"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA"] = "anthropic/claude-3-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_SONNET"] = "anthropic/claude-3-sonnet"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_03_2024"] = "cohere/command-r-03-2024"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE"] = "mistralai/mistral-large"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_7B_IT"] = "google/gemma-7b-it"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0613"] = "openai/gpt-3.5-turbo-0613"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO_PREVIEW"] = "openai/gpt-4-turbo-preview"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO"] = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_MEDIUM"] = "mistralai/mistral-medium"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_TINY"] = "mistralai/mistral-tiny"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2"] = "mistralai/mistral-7b-instruct-v0.2"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X7B"] = "cognitivecomputations/dolphin-mixtral-8x7b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO_VISION"] = "google/gemini-pro-vision"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO"] = "google/gemini-pro"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B"] = "mistralai/mixtral-8x7b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT"] = "mistralai/mixtral-8x7b-instruct"; + E_OPENROUTER_MODEL["MODEL_OPENCHAT_OPENCHAT_7B_FREE"] = "openchat/openchat-7b:free"; + E_OPENROUTER_MODEL["MODEL_OPENCHAT_OPENCHAT_7B"] = "openchat/openchat-7b"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_NOROMAID_20B"] = "neversleep/noromaid-20b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_1_BETA"] = "anthropic/claude-2.1:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_1"] = "anthropic/claude-2.1"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_BETA"] = "anthropic/claude-2:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2"] = "anthropic/claude-2"; + E_OPENROUTER_MODEL["MODEL_TEKNIUM_OPENHERMES_2_5_MISTRAL_7B"] = "teknium/openhermes-2.5-mistral-7b"; + E_OPENROUTER_MODEL["MODEL_UNDI95_TOPPY_M_7B_FREE"] = "undi95/toppy-m-7b:free"; + E_OPENROUTER_MODEL["MODEL_UNDI95_TOPPY_M_7B"] = "undi95/toppy-m-7b"; + E_OPENROUTER_MODEL["MODEL_ALPINDALE_GOLIATH_120B"] = "alpindale/goliath-120b"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_AUTO"] = "openrouter/auto"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_1106"] = "openai/gpt-3.5-turbo-1106"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_1106_PREVIEW"] = "openai/gpt-4-1106-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CHAT_BISON_32K"] = "google/palm-2-chat-bison-32k"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CODECHAT_BISON_32K"] = "google/palm-2-codechat-bison-32k"; + E_OPENROUTER_MODEL["MODEL_JONDURBIN_AIROBOROS_L2_70B"] = "jondurbin/airoboros-l2-70b"; + E_OPENROUTER_MODEL["MODEL_XWIN_LM_XWIN_LM_70B"] = "xwin-lm/xwin-lm-70b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT"] = "openai/gpt-3.5-turbo-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; + E_OPENROUTER_MODEL["MODEL_PYGMALIONAI_MYTHALION_13B"] = "pygmalionai/mythalion-13b"; + 
E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_16K"] = "openai/gpt-3.5-turbo-16k"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_32K"] = "openai/gpt-4-32k"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_32K_0314"] = "openai/gpt-4-32k-0314"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_NOUS_HERMES_LLAMA2_13B"] = "nousresearch/nous-hermes-llama2-13b"; + E_OPENROUTER_MODEL["MODEL_MANCER_WEAVER"] = "mancer/weaver"; + E_OPENROUTER_MODEL["MODEL_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE"] = "huggingfaceh4/zephyr-7b-beta:free"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_0_BETA"] = "anthropic/claude-2.0:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_0"] = "anthropic/claude-2.0"; + E_OPENROUTER_MODEL["MODEL_UNDI95_REMM_SLERP_L2_13B"] = "undi95/remm-slerp-l2-13b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CHAT_BISON"] = "google/palm-2-chat-bison"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CODECHAT_BISON"] = "google/palm-2-codechat-bison"; + E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B_FREE"] = "gryphe/mythomax-l2-13b:free"; + E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B"] = "gryphe/mythomax-l2-13b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_2_13B_CHAT"] = "meta-llama/llama-2-13b-chat"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_2_70B_CHAT"] = "meta-llama/llama-2-70b-chat"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO"] = "openai/gpt-3.5-turbo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0125"] = "openai/gpt-3.5-turbo-0125"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4"] = "openai/gpt-4"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; +})(E_OPENROUTER_MODEL || (E_OPENROUTER_MODEL = {})); +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvbW9kZWxzL2NhY2hlL29wZW5yb3V0ZXItbW9kZWxzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE1BQU0sQ0FBTixJQUFZLGtCQW1TWDtBQW5TRCxXQUFZLGtCQUFrQjtJQUM1Qix5RUFBbUQsQ0FBQTtJQUNuRCw2RkFBdUUsQ0FBQTtJQUN2RSx5R0FBbUYsQ0FBQTtJQUNuRiwyR0FBcUYsQ0FBQTtJQUNyRiwrRUFBeUQsQ0FBQTtJQUN6RCx1R0FBaUYsQ0FBQTtJQUNqRixpR0FBMkUsQ0FBQTtJQUMzRSx5R0FBbUYsQ0FBQTtJQUNuRixtR0FBNkUsQ0FBQTtJQUM3RSx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSx1RkFBaUUsQ0FBQTtJQUNqRSwyREFBcUMsQ0FBQTtJQUNyQywySEFBcUcsQ0FBQTtJQUNyRyxpSEFBMkYsQ0FBQTtJQUMzRix5RkFBbUUsQ0FBQTtJQUNuRSwyRkFBcUUsQ0FBQTtJQUNyRSw2RkFBdUUsQ0FBQTtJQUN2RSxpR0FBMkUsQ0FBQTtJQUMzRSxtRkFBNkQsQ0FBQTtJQUM3RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCx5RUFBbUQsQ0FBQTtJQUNuRCx1RUFBaUQsQ0FBQTtJQUNqRCxxRkFBK0QsQ0FBQTtJQUMvRCwyRUFBcUQsQ0FBQTtJQUNyRCxpRUFBMkMsQ0FBQTtJQUMzQyxtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSxtSUFBNkcsQ0FBQTtJQUM3RyxpRkFBMkQsQ0FBQTtJQUMzRCxxRkFBK0QsQ0FBQTtJQUMvRCwyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxxSEFBK0YsQ0FBQTtJQUMvRixtRkFBNkQsQ0FBQTtJQUM3RCx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSx5RUFBbUQsQ0FBQTtJQUNuRCw2RkFBdUUsQ0FBQTtJQUN2RSw2RkFBdUUsQ0FBQTtJQUN2RSxtRUFBNkMsQ0FBQTtJQUM3Qyx5REFBbUMsQ0FBQTtJQUNuQyxtRkFBNkQsQ0FBQTtJQUM3RCxxSEFBK0YsQ0FBQTtJQUMvRixtSUFBNkcsQ0FBQTtJQUM3Ryw2RUFBdUQsQ0FBQTtJQUN2RCxpR0FBMkUsQ0FBQTtJQUMzRSxpR0FBMkUsQ0FBQTtJQUMzRSx1RkFBaUUsQ0FBQTtJQUNqRSx5R0FBbUYsQ0FBQTtJQUNuRixxRUFBK0MsQ0FBQTtJQUMvQyw2RUFBdUQsQ0FBQTtJQUN2RCx5SUFBbUgsQ0FBQTtJQUNuSCxtSUFBNkcsQ0FBQTtJQUM3Ryx1RkFBaUUsQ0FBQTtJQUNqRSx1RUFBaUQsQ0FBQTtJQUNqRCwyR0FBcUYsQ0FBQTtJQUNyRix1RkFBaUUsQ0FBQTtJQUNqRSx5R0FBbUYsQ0FBQTtJQUNuRixtRUFBNkMsQ0FBQTtJQUM3QyxxRUFBK0MsQ0FBQTtJQUMvQywrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSxpRUFBMkMsQ0FBQTtJQUMzQywrREFBeUMsQ0FBQTtJQUN6QyxtR0FBNkUsQ0FBQTtJQUM3RSx5R
kFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QywyREFBcUMsQ0FBQTtJQUNyQyw2REFBdUMsQ0FBQTtJQUN2Qyw2R0FBdUYsQ0FBQTtJQUN2Riw2SEFBdUcsQ0FBQTtJQUN2RyxtSEFBNkYsQ0FBQTtJQUM3RixxSEFBK0YsQ0FBQTtJQUMvRiwyR0FBcUYsQ0FBQTtJQUNyRixxSEFBK0YsQ0FBQTtJQUMvRiwyR0FBcUYsQ0FBQTtJQUNyRixxRkFBK0QsQ0FBQTtJQUMvRCxpRUFBMkMsQ0FBQTtJQUMzQywyREFBcUMsQ0FBQTtJQUNyQywyREFBcUMsQ0FBQTtJQUNyQyx1SEFBaUcsQ0FBQTtJQUNqRyw2R0FBdUYsQ0FBQTtJQUN2RixtSEFBNkYsQ0FBQTtJQUM3RixtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxtSEFBNkYsQ0FBQTtJQUM3RixxRUFBK0MsQ0FBQTtJQUMvQyxpRkFBMkQsQ0FBQTtJQUMzRCwrREFBeUMsQ0FBQTtJQUN6QyxtRkFBNkQsQ0FBQTtJQUM3RCx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCw2SEFBdUcsQ0FBQTtJQUN2RywrRUFBeUQsQ0FBQTtJQUN6RCxtREFBNkIsQ0FBQTtJQUM3Qiw2RkFBdUUsQ0FBQTtJQUN2RSwrRUFBeUQsQ0FBQTtJQUN6RCxpRUFBMkMsQ0FBQTtJQUMzQyxxRkFBK0QsQ0FBQTtJQUMvRCxpR0FBMkUsQ0FBQTtJQUMzRSw2R0FBdUYsQ0FBQTtJQUN2RixtR0FBNkUsQ0FBQTtJQUM3RSx1RUFBaUQsQ0FBQTtJQUNqRCx5RUFBbUQsQ0FBQTtJQUNuRCxxRUFBK0MsQ0FBQTtJQUMvQyxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxpSEFBMkYsQ0FBQTtJQUMzRix5RkFBbUUsQ0FBQTtJQUNuRSxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSx5RkFBbUUsQ0FBQTtJQUNuRSx5RkFBbUUsQ0FBQTtJQUNuRSwyRUFBcUQsQ0FBQTtJQUNyRCxtRkFBNkQsQ0FBQTtJQUM3RCwyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSwrRUFBeUQsQ0FBQTtJQUN6RCx5RkFBbUUsQ0FBQTtJQUNuRSxtRkFBNkQsQ0FBQTtJQUM3RCwrRkFBeUUsQ0FBQTtJQUN6RSxxRkFBK0QsQ0FBQTtJQUMvRCxpSEFBMkYsQ0FBQTtJQUMzRix1R0FBaUYsQ0FBQTtJQUNqRixtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSxpR0FBMkUsQ0FBQTtJQUMzRSx1RkFBaUUsQ0FBQTtJQUNqRSw2REFBdUMsQ0FBQTtJQUN2Qyw2RUFBdUQsQ0FBQTtJQUN2RCw2RUFBdUQsQ0FBQTtJQUN2RCxtRkFBNkQsQ0FBQTtJQUM3RCx1SEFBaUcsQ0FBQTtJQUNqRyw2R0FBdUYsQ0FBQTtJQUN2Rix5R0FBbUYsQ0FBQTtJQUNuRixxRkFBK0QsQ0FBQTtJQUMvRCxxRkFBK0QsQ0FBQTtJQUMvRCxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QywyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSwyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSxpSEFBMkYsQ0FBQTtJQUMzRiwySEFBcUcsQ0FBQTtJQUNyRyxpSEFBMkYsQ0FBQTtJQUMzRiwrRkFBeUUsQ0FBQTtJQUN6RSxxRkFBK0QsQ0FBQTtJQUMvRCwyRkFBcUUsQ0FBQTtJQUNyRSxpR0FBMkUsQ0FBQTtJQUMzRSxtRUFBNkMsQ0FBQTtJQUM3Qyx5RkFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QyxtRkFBNkQsQ0FBQTtJQUM3RCwyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCxtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSx1RUFBaUQsQ0FBQTtJQUNqRCx5RUFBbUQsQ0FBQTtJQUNuRCx5R0FBbUYsQ0FBQTtJQUNuRix1R0FBaUYsQ0FBQTtJQUNqRix5R0FBbUYsQ0FBQTtJQUNuRixpRkFBMkQsQ0FBQTtJQUMzRCx5RUFBbUQsQ0FBQTtJQUNuRCwyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCxtRkFBNkQsQ0FBQTtJQUM3RCwyRkFBcUUsQ0FBQTtJQUNyRSx5SEFBbUcsQ0FBQTtJQUNuRyx5SEFBbUcsQ0FBQTtJQUNuRywyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSxxR0FBK0UsQ0FBQTtJQUMvRSxtR0FBNkUsQ0FBQTtJQUM3RSxtRkFBNkQsQ0FBQTtJQUM3RCx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCxxRUFBK0MsQ0FBQTtJQUMvQywyRkFBcUUsQ0FBQTtJQUNyRSx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCwyRUFBcUQsQ0FBQTtJQUNyRCx5RUFBbUQsQ0FBQTtJQUNuRCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCw2REFBdUMsQ0FBQTtJQUN2Qyx1RUFBaUQsQ0FBQTtJQUNqRCxtSEFBNkYsQ0FBQTtJQUM3Rix5R0FBbUYsQ0FBQTtJQUNuRiwyRUFBcUQsQ0FBQTtJQUNyRCx1SEFBaUcsQ0FBQTtJQUNqRyxpRkFBMkQsQ0FBQTtJQUMzRCxxR0FBK0UsQ0FBQTtJQUMvRSwyRkFBcUUsQ0FBQTtJQUNyRSxxR0FBK0UsQ0FBQTtJQUMvRSx5R0FBbUYsQ0FBQTtJQUNuRiwrR0FBeUYsQ0FBQTtJQUN6RixxR0FBK0UsQ0FBQTtJQUMvRSxtSEFBNkYsQ0FBQTtJQUM3Rix5R0FBbUYsQ0FBQTtJQUNuRiwrRkFBeUUsQ0FBQTtJQUN6RSwrRUFBeUQsQ0FBQTtJQUN6RCwyREFBcUMsQ0FBQTtJQUNyQyw2RUFBdUQsQ0FBQTtJQUN2RCxpRkFBMkQsQ0FBQTtJQUMzRCx1RkFBaUUsQ0FBQTtJQUNqRSwrR0FBeUYsQ0FBQTtJQUN6Riw2RkFBdUUsQ0FBQTtJQUN2RSxpRkFB
MkQsQ0FBQTtJQUMzRCx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSwrRkFBeUUsQ0FBQTtJQUN6RSxpR0FBMkUsQ0FBQTtJQUMzRSxxRkFBK0QsQ0FBQTtJQUMvRCwrRUFBeUQsQ0FBQTtJQUN6RCwyRUFBcUQsQ0FBQTtJQUNyRCxxRUFBK0MsQ0FBQTtJQUMvQywyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxtR0FBNkUsQ0FBQTtJQUM3RSw2REFBdUMsQ0FBQTtJQUN2QyxpRUFBMkMsQ0FBQTtJQUMzQywyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSxtRkFBNkQsQ0FBQTtJQUM3RCxpRkFBMkQsQ0FBQTtJQUMzRCwrRUFBeUQsQ0FBQTtJQUN6RCxxRUFBK0MsQ0FBQTtJQUMvQyxtRkFBNkQsQ0FBQTtJQUM3RCxxRkFBK0QsQ0FBQTtJQUMvRCx1SEFBaUcsQ0FBQTtJQUNqRyxpRkFBMkQsQ0FBQTtJQUMzRCwrRUFBeUQsQ0FBQTtJQUN6RCw2RUFBdUQsQ0FBQTtJQUN2RCxxR0FBK0UsQ0FBQTtJQUMvRSxxSEFBK0YsQ0FBQTtJQUMvRixpRkFBMkQsQ0FBQTtJQUMzRCxtRUFBNkMsQ0FBQTtJQUM3Qyw2RUFBdUQsQ0FBQTtJQUN2RCwrRkFBeUUsQ0FBQTtJQUN6RSxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCwrRUFBeUQsQ0FBQTtJQUN6RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCwrRUFBeUQsQ0FBQTtJQUN6RCxxRUFBK0MsQ0FBQTtJQUMvQyxtR0FBNkUsQ0FBQTtJQUM3RSw2RUFBdUQsQ0FBQTtJQUN2RCxtRUFBNkMsQ0FBQTtJQUM3Qyw2RUFBdUQsQ0FBQTtJQUN2RCwrREFBeUMsQ0FBQTtJQUN6QyxtRkFBNkQsQ0FBQTtJQUM3RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RkFBbUUsQ0FBQTtJQUNuRSxpR0FBMkUsQ0FBQTtJQUMzRSxxRkFBK0QsQ0FBQTtJQUMvRCx1RUFBaUQsQ0FBQTtJQUNqRCwyRkFBcUUsQ0FBQTtJQUNyRSxxR0FBK0UsQ0FBQTtJQUMvRSxtRkFBNkQsQ0FBQTtJQUM3RCxpRkFBMkQsQ0FBQTtJQUMzRCxpRUFBMkMsQ0FBQTtJQUMzQywyRUFBcUQsQ0FBQTtJQUNyRCx1R0FBaUYsQ0FBQTtJQUNqRiwyREFBcUMsQ0FBQTtJQUNyQyxtR0FBNkUsQ0FBQTtJQUM3RSxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxpRkFBMkQsQ0FBQTtJQUMzRCxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCx1RkFBaUUsQ0FBQTtJQUNqRSx1RkFBaUUsQ0FBQTtJQUNqRSx5RUFBbUQsQ0FBQTtJQUNuRCxtRkFBNkQsQ0FBQTtJQUM3RCx5REFBbUMsQ0FBQTtJQUNuQyxtRUFBNkMsQ0FBQTtBQUMvQyxDQUFDLEVBblNXLGtCQUFrQixLQUFsQixrQkFBa0IsUUFtUzdCIn0= \ No newline at end of file diff --git a/packages/kbot/dist-in/models/index.js b/packages/kbot/dist-in/models/index.js index 03579cd3..b1699a32 100644 --- a/packages/kbot/dist-in/models/index.js +++ b/packages/kbot/dist-in/models/index.js @@ -89,4 +89,4 @@ export const all = () => { } return models; }; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbW9kZWxzL2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxNQUFNLE9BQU8sQ0FBQTtBQUN6QixPQUFPLEtBQUssSUFBSSxNQUFNLFdBQVcsQ0FBQTtBQUNqQyxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFcEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxXQUFXLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDakQsT0FBTyxFQUFFLFVBQVUsSUFBSSxxQkFBcUIsRUFBMEMscUJBQXFCLEVBQUUsbUJBQW1CLElBQUksNkJBQTZCLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMxTCxPQUFPLEVBQUUsVUFBVSxJQUFJLGlCQUFpQixFQUFzQyxtQkFBbUIsSUFBSSx5QkFBeUIsRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUVuSixPQUFPLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUN2RCxPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sSUFBSSxZQUFZLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMxRCxPQUFPLEVBQUUsTUFBTSxJQUFJLGdCQUFnQixFQUFFLE1BQU0sdUJBQXVCLENBQUE7QUFFbEUsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRTtJQUU5QixJQUFJLFNBQVMsR0FBRyxnQkFBZ0IsQ0FBQTtJQUNoQyxJQUFJLFVBQVUsR0FBRyxZQUFZLENBQUE7SUFDN0IsSUFBSSxlQUFlLEdBQUc7UUFDcEI7WUFDRSxJQUFJLEVBQUUsZUFBZTtZQUNyQixNQUFNLEVBQUUsZUFBZTtTQUN4QjtRQUNEO1lBQ0UsSUFBSSxFQUFFLG1CQUFtQjtZQUN6QixNQUFNLEVBQUUsbUJBQW1CO1NBQzVCO0tBQ0YsQ0FBQTtJQUVELE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxXQUFXLEVBQUUsRUFBRSxhQUFhLENBQUMsQ0FBQTtJQUNuRSxJQUFJLE1BQU0sQ0FBQyxnQkFBZ0IsQ0FBQyxFQUFFLENBQUM7UUFDN0IsVUFBVSxHQUFHLElBQUksQ0FBQyxnQkFBZ0IsRUFBRSxNQUFNLENBQVEsQ0FBQTtJQUNwRCxDQUFDO0lBRUQsTUFBTSxnQkFBZ0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFdBQVcsRUFBRSxFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDdkUsSUFBSSxNQUFNLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDO1FBQzdCLFNBQVMsR0FBRyxJQUFJLENBQUMsZ0JBQWdCLEVBQUUsTUFBTSxDQUFRLENBQUE7SUFDbkQsQ0FBQztJQUNELE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLHlCQUF5QixDQUFDLENBQUMsQ0FBQTtJQUMxRCxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsNkJBQTZCLENBQUMsU0FBZ0IsQ0FBQyxDQUFDLENBQUE7SUFFL0QsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7SUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFVBQWlCLENBQUMsQ0FBQyxDQUFBO0lBQzVELE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFFdEIsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyx1QkFBdUIsQ0FBQyxDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLGVBQXNCLENBQUMsQ0FBQyxDQUFBO0lBQ2pFLE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFDdEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFJRCxNQUFNLENBQUMsTUFBTSxNQUFNLEdBQUcsR0FBRyxFQUFFO0lBQ3pCLE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLGNBQWMsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLHFCQUFxQixDQUFDLENBQUE7SUFDMUQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxjQUFjLENBQUMsRUFBRSxDQUFDO1FBQzVCLHFCQUFxQixFQUFFLENBQUE7SUFDekIsQ0FBQztJQUNELElBQUksTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDM0IsTUFBTSxTQUFTLEdBQTJCLElBQUksQ0FBQyxjQUFjLEVBQUUsTUFBTSxDQUEyQixDQUFBO1FBQ2hHLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMseUJBQXlCLENBQUMsQ0FBQyxDQUFBO1FBQzFELE1BQU0sQ0FBQyxJQUFJLENBQUMsR0FBRyw2QkFBNkIsQ0FBQyxTQUFTLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtJQUNqRSxDQUFDO0lBQ0QsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxpQkFBaUIsQ0FBQyxDQUFBO0lBRTVELE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxjQUFjLEVBQVMsQ0FBQTtJQUN0QyxJQUFJLENBQUMsTUFBTSxDQUFDLFVBQVUsQ0FBQyxJQUFJLE1BQU0sRUFBRSxNQUFNLEVBQUUsR0FBRyxFQUFFLENBQUM7UUFDL0MsaUJBQWlCLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQTtJQUN0QyxDQ
UFDO0lBRUQsSUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUN2QixNQUFNLFNBQVMsR0FBdUIsSUFBSSxDQUFDLFVBQVUsRUFBRSxNQUFNLENBQXVCLENBQUE7UUFDcEYsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7UUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFBO0lBQzdELENBQUM7SUFDRCxNQUFNLENBQUMsS0FBSyxDQUFDLHVCQUF1QixFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxTQUFTLENBQUMsQ0FBQTtJQUN0QixPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEdBQUcsR0FBRyxHQUFHLEVBQUU7SUFDdEIsSUFBSSxNQUFNLEdBQVUsRUFBRSxDQUFBO0lBQ3RCLE1BQU0sY0FBYyxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMscUJBQXFCLENBQUMsQ0FBQTtJQUMxRCxJQUFJLENBQUMsTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDNUIscUJBQXFCLEVBQUUsQ0FBQTtJQUN6QixDQUFDO0lBQ0QsSUFBSSxNQUFNLENBQUMsY0FBYyxDQUFDLEVBQUUsQ0FBQztRQUMzQixNQUFNLFNBQVMsR0FBMkIsSUFBSSxDQUFDLGNBQWMsRUFBRSxNQUFNLENBQTJCLENBQUE7UUFDaEcsTUFBTSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBQzFDLENBQUM7SUFDRCxNQUFNLFVBQVUsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLGlCQUFpQixDQUFDLENBQUE7SUFDbEQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFDdEMsSUFBSSxDQUFDLE1BQU0sQ0FBQyxVQUFVLENBQUMsSUFBSSxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUFDO1FBQy9DLGlCQUFpQixDQUFDLE1BQU0sQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUE7SUFDdEMsQ0FBQztJQUVELElBQUksTUFBTSxDQUFDLFVBQVUsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxTQUFTLEdBQXVCLElBQUksQ0FBQyxVQUFVLEVBQUUsTUFBTSxDQUF1QixDQUFBO1FBQ3BGLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMscUJBQXFCLENBQUMsQ0FBQyxDQUFBO1FBQ3RELE1BQU0sR0FBRyxNQUFNLENBQUMsTUFBTSxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMxQyxDQUFDO0lBQ0QsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUEifQ== \ No newline at end of file +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbW9kZWxzL2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxNQUFNLE9BQU8sQ0FBQTtBQUN6QixPQUFPLEtBQUssSUFBSSxNQUFNLFdBQVcsQ0FBQTtBQUNqQyxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFcEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxXQUFXLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDakQsT0FBTyxFQUFFLFVBQVUsSUFBSSxxQkFBcUIsRUFBMEMscUJBQXFCLEVBQUUsbUJBQW1CLElBQUksNkJBQTZCLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMxTCxPQUFPLEVBQUUsVUFBVSxJQUFJLGlCQUFpQixFQUFzQyxtQkFBbUIsSUFBSSx5QkFBeUIsRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUVuSixPQUFPLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUN2RCxPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sSUFBSSxZQUFZLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMxRCxPQUFPLEVBQUUsTUFBTSxJQUFJLGdCQUFnQixFQUFFLE1BQU0sdUJBQXVCLENBQUE7QUFFbEUsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRTtJQUU5QixJQUFJLFNBQVMsR0FBRyxnQkFBZ0IsQ0FBQTtJQUNoQyxJQUFJLFVBQVUsR0FBRyxZQUFZLENBQUE7SUFDN0IsSUFBSSxlQUFlLEdBQUc7UUFDcEI7WUFDRSxJQUFJLEVBQUUsZUFBZTtZQUNyQixNQUFNLEVBQUUsZUFBZTtTQUN4QjtRQUNEO1lBQ0UsSUFBSSxFQUFFLG1CQUFtQjtZQUN6QixNQUFNLEVBQUUsbUJBQW1CO1NBQzVCO0tBQ0YsQ0FBQTtJQUVELE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxXQUFXLEVBQUUsRUFBRSxhQUFhLENBQUMsQ0FBQTtJQUNuRSxJQUFJLE1BQU0sQ0FBQyxnQkFBZ0IsQ0FBQyxFQUFFLENBQUM7UUFDN0IsVUFBVSxHQUFHLElBQUksQ0FBQyxnQkFBZ0IsRUFBRSxNQUFNLENBQVEsQ0FBQTtJQUNwRCxDQUFDO0lBRUQsTUFBTSxnQkFBZ0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFdBQVcsRUFBRSxFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDdkUsSUFBSSxNQUFNLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDO1FBQzdCLFNBQVMsR0FBRyxJQUFJLENBQUMsZ0JBQWdCLEVBQUUsTUFBTSxDQUFRLENBQUE7SUFDbkQsQ0FBQztJQUNELE1BQU0s
TUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLHlCQUF5QixDQUFDLENBQUMsQ0FBQTtJQUMxRCxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsNkJBQTZCLENBQUMsU0FBZ0IsQ0FBQyxDQUFDLENBQUE7SUFFL0QsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7SUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFVBQWlCLENBQUMsQ0FBQyxDQUFBO0lBQzVELE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFFdEIsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyx1QkFBdUIsQ0FBQyxDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLGVBQXNCLENBQUMsQ0FBQyxDQUFBO0lBQ2pFLE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFDdEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsTUFBTSxNQUFNLEdBQUcsR0FBRyxFQUFFO0lBQ3pCLE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLGNBQWMsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLHFCQUFxQixDQUFDLENBQUE7SUFDMUQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxjQUFjLENBQUMsRUFBRSxDQUFDO1FBQzVCLHFCQUFxQixFQUFFLENBQUE7SUFDekIsQ0FBQztJQUNELElBQUksTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDM0IsTUFBTSxTQUFTLEdBQTJCLElBQUksQ0FBQyxjQUFjLEVBQUUsTUFBTSxDQUEyQixDQUFBO1FBQ2hHLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMseUJBQXlCLENBQUMsQ0FBQyxDQUFBO1FBQzFELE1BQU0sQ0FBQyxJQUFJLENBQUMsR0FBRyw2QkFBNkIsQ0FBQyxTQUFTLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtJQUNqRSxDQUFDO0lBQ0QsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxpQkFBaUIsQ0FBQyxDQUFBO0lBRTVELE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxjQUFjLEVBQVMsQ0FBQTtJQUN0QyxJQUFJLENBQUMsTUFBTSxDQUFDLFVBQVUsQ0FBQyxJQUFJLE1BQU0sRUFBRSxNQUFNLEVBQUUsR0FBRyxFQUFFLENBQUM7UUFDL0MsaUJBQWlCLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQTtJQUN0QyxDQUFDO0lBRUQsSUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUN2QixNQUFNLFNBQVMsR0FBdUIsSUFBSSxDQUFDLFVBQVUsRUFBRSxNQUFNLENBQXVCLENBQUE7UUFDcEYsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7UUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFBO0lBQzdELENBQUM7SUFDRCxNQUFNLENBQUMsS0FBSyxDQUFDLHVCQUF1QixFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxTQUFTLENBQUMsQ0FBQTtJQUN0QixPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEdBQUcsR0FBRyxHQUFHLEVBQUU7SUFDdEIsSUFBSSxNQUFNLEdBQVUsRUFBRSxDQUFBO0lBQ3RCLE1BQU0sY0FBYyxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMscUJBQXFCLENBQUMsQ0FBQTtJQUMxRCxJQUFJLENBQUMsTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDNUIscUJBQXFCLEVBQUUsQ0FBQTtJQUN6QixDQUFDO0lBQ0QsSUFBSSxNQUFNLENBQUMsY0FBYyxDQUFDLEVBQUUsQ0FBQztRQUMzQixNQUFNLFNBQVMsR0FBMkIsSUFBSSxDQUFDLGNBQWMsRUFBRSxNQUFNLENBQTJCLENBQUE7UUFDaEcsTUFBTSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBQzFDLENBQUM7SUFDRCxNQUFNLFVBQVUsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLGlCQUFpQixDQUFDLENBQUE7SUFDbEQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFDdEMsSUFBSSxDQUFDLE1BQU0sQ0FBQyxVQUFVLENBQUMsSUFBSSxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUFDO1FBQy9DLGlCQUFpQixDQUFDLE1BQU0sQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUE7SUFDdEMsQ0FBQztJQUVELElBQUksTUFBTSxDQUFDLFVBQVUsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxTQUFTLEdBQXVCLElBQUksQ0FBQyxVQUFVLEVBQUUsTUFBTSxDQUF1QixDQUFBO1FBQ3BGLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMscUJBQXFCLENBQUMsQ0FBQyxDQUFBO1FBQ3RELE1BQU0sR0FBRyxNQUFNLENBQUMsTUFBTSxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMxQyxDQUFDO0lBQ0QsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUEifQ== \ No newline at end of file diff --git 
a/packages/kbot/dist-in/models/openrouter.d.ts b/packages/kbot/dist-in/models/openrouter.d.ts index d9053961..1dca729e 100644 --- a/packages/kbot/dist-in/models/openrouter.d.ts +++ b/packages/kbot/dist-in/models/openrouter.d.ts @@ -7,6 +7,14 @@ interface ModelContext { max_tokens: number; supported_parameters: string[]; } +interface TopProvider { + context_length: number; + max_completion_tokens: number | null; + is_moderated: boolean; + supports_functions?: boolean; + supports_function_calling?: boolean; + supports_tools?: boolean; +} export interface OpenRouterModel { id: string; name: string; @@ -14,6 +22,7 @@ export interface OpenRouterModel { pricing: ModelPricing; context: ModelContext; created: number; + top_provider?: TopProvider; } export interface CachedModels { timestamp: number; diff --git a/packages/kbot/dist-in/models/openrouter.js b/packages/kbot/dist-in/models/openrouter.js index 6145d27f..40ccb466 100644 --- a/packages/kbot/dist-in/models/openrouter.js +++ b/packages/kbot/dist-in/models/openrouter.js @@ -71,4 +71,4 @@ export function listModelsAsStrings(models) { return `${model.id} | ${isFree ? 'free' : 'paid'}`; }); } -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9tb2RlbHMvb3BlbnJvdXRlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUM7QUFFbEMsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQTRCcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLFVBQVUsQ0FBQztBQUN6QyxpSEFBaUg7QUFDakgsTUFBTSxTQUFTLEdBQUcsYUFBYSxDQUFDLElBQUksR0FBRyxDQUFDLEdBQUcsRUFBRSxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7QUFDL0QsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDLEdBQUcsRUFBRSxNQUFNLEVBQUUsd0JBQXdCLENBQUMsQ0FBQyxDQUFBO0FBQzlHLE1BQU0sY0FBYyxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxHQUFHLElBQUksQ0FBQyxDQUFDLDJCQUEyQjtBQUV2RSxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsQ0FBQyxZQUFvQixVQUFVLEVBQXFDLEVBQUU7SUFDakcsSUFBSSxDQUFDO1FBQ0gsSUFBSSxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsRUFBRSxDQUFDO1lBQ3ZCLE9BQU8sSUFBSSxDQUFBO1FBQ2IsQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFHLElBQUksQ0FBQyxTQUFTLEVBQUUsTUFBTSxDQUFpQixDQUFDO1FBQzFELE1BQU0sR0FBRyxHQUFHLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUN2QixJQUFJLEdBQUcsR0FBRyxTQUFTLENBQUMsU0FBUyxHQUFHLGNBQWMsRUFBRSxDQUFDO1lBQy9DLGFBQWE7UUFDZixDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsTUFBYSxDQUFDO0lBQ2pDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUNoRCxPQUFPLElBQUksQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFDRCxTQUFTLFlBQVksQ0FBQyxNQUF5QixFQUFFLFlBQW9CLFVBQVU7SUFDN0UsTUFBTSxTQUFTLEdBQWlCO1FBQzlCLFNBQVMsRUFBRSxJQUFJLENBQUMsR0FBRyxFQUFFO1FBQ3JCLE1BQU07S0FDUCxDQUFDO0lBQ0YsS0FBSyxDQUFDLFNBQVMsRUFBRSxTQUFTLENBQUMsQ0FBQTtBQUM3QixDQUFDO0FBRUQsTUFBTSxDQUFDLE1BQU0scUJBQXFCLEdBQUcsS0FBSyxFQUFFLFlBQW9CLFVBQVUsRUFBOEIsRUFBRTtJQUN4RyxJQUFJLENBQUM7UUFDSCx5Q0FBeUM7UUFDekMsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxDQUM5QixxQ0FBcUMsRUFDckM7WUFDRSxNQUFNLEVBQUU7WUFDTixnQ0FBZ0M7YUFDakM7U0FDRixDQUNGLENBQUE7UUFDRCxZQUFZLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsQ0FBQTtRQUNoQyxNQUFNLENBQUMsS0FBSyxDQUFDLFdBQVcsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsTUFBTSx5QkFBeUIsU0FBUyxFQUFFLENBQUMsQ
0FBQTtRQUN0RixPQUFPLFFBQVEsQ0FBQyxJQUFJLENBQUMsSUFBYSxDQUFBO0lBQ3BDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQyxtQ0FBbUMsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUN4RCxNQUFNLEtBQUssQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFFRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE1BQXlCO0lBQ3RELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FDbEIsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUNSLEtBQUssQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUM7UUFDMUIsS0FBSyxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQztRQUM5QixLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQzVCLENBQUM7QUFDSixDQUFDO0FBQ0QsTUFBTSxVQUFVLG9CQUFvQixDQUFDLE1BQXlCO0lBQzVELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQyxDQUFDLEtBQUssRUFBRSxFQUFFLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxNQUFNLEtBQUssQ0FBQyxDQUFDLENBQUM7QUFDOUQsQ0FBQztBQUVELE1BQU0sVUFBVSx3QkFBd0IsQ0FBQyxNQUF5QjtJQUNoRSxPQUFPLE1BQU0sQ0FBQyxNQUFNLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsVUFBVSxLQUFLLENBQUMsQ0FBQyxDQUFDO0FBQ2xFLENBQUM7QUFFRCxNQUFNLFVBQVUsbUJBQW1CLENBQUMsTUFBeUI7SUFDM0QsT0FBTyxNQUFNLENBQUMsTUFBTSxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQUMsQ0FBQztBQUM3RCxDQUFDO0FBQ0QsTUFBTSxVQUFVLG1CQUFtQixDQUFDLE1BQXlCO0lBQzNELE1BQU0sR0FBRyxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsRUFBRSxFQUFFLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxhQUFhLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUE7SUFDNUQsT0FBTyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUU7UUFDMUIsTUFBTSxNQUFNLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxRQUFRLENBQUMsTUFBTSxDQUFDLENBQUE7UUFDMUMsT0FBTyxHQUFHLEtBQUssQ0FBQyxFQUFFLE1BQU0sTUFBTSxDQUFDLENBQUMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFBO0lBQ3BELENBQUMsQ0FBQyxDQUFBO0FBQ0osQ0FBQyJ9 \ No newline at end of file +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9tb2RlbHMvb3BlbnJvdXRlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUM7QUFFbEMsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQXlDcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLFVBQVUsQ0FBQztBQUN6QyxpSEFBaUg7QUFDakgsTUFBTSxTQUFTLEdBQUcsYUFBYSxDQUFDLElBQUksR0FBRyxDQUFDLEdBQUcsRUFBRSxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7QUFDL0QsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDLEdBQUcsRUFBRSxNQUFNLEVBQUUsd0JBQXdCLENBQUMsQ0FBQyxDQUFBO0FBQzlHLE1BQU0sY0FBYyxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxHQUFHLElBQUksQ0FBQyxDQUFDLDJCQUEyQjtBQUV2RSxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsQ0FBQyxZQUFvQixVQUFVLEVBQXFDLEVBQUU7SUFDakcsSUFBSSxDQUFDO1FBQ0gsSUFBSSxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsRUFBRSxDQUFDO1lBQ3ZCLE9BQU8sSUFBSSxDQUFBO1FBQ2IsQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFHLElBQUksQ0FBQyxTQUFTLEVBQUUsTUFBTSxDQUFpQixDQUFDO1FBQzFELE1BQU0sR0FBRyxHQUFHLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUN2QixJQUFJLEdBQUcsR0FBRyxTQUFTLENBQUMsU0FBUyxHQUFHLGNBQWMsRUFBRSxDQUFDO1lBQy9DLGFBQWE7UUFDZixDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsTUFBYSxDQUFDO0lBQ2pDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUNoRCxPQUFPLElBQUksQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFDRCxTQUFTLFlBQVksQ0FBQyxNQUF5QixFQUFFLFlBQW9CLFVBQVU7SUFDN0UsTUFBTSxTQUFTLEdBQWlCO1FBQzlCLFNBQVMsRUFBRSxJQUFJLENBQUMsR0FBRyxFQUFFO1FBQ3JCLE1BQU07
S0FDUCxDQUFDO0lBQ0YsS0FBSyxDQUFDLFNBQVMsRUFBRSxTQUFTLENBQUMsQ0FBQTtBQUM3QixDQUFDO0FBRUQsTUFBTSxDQUFDLE1BQU0scUJBQXFCLEdBQUcsS0FBSyxFQUFFLFlBQW9CLFVBQVUsRUFBOEIsRUFBRTtJQUN4RyxJQUFJLENBQUM7UUFDSCx5Q0FBeUM7UUFDekMsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxDQUM5QixxQ0FBcUMsRUFDckM7WUFDRSxNQUFNLEVBQUU7WUFDTixnQ0FBZ0M7YUFDakM7U0FDRixDQUNGLENBQUE7UUFDRCxZQUFZLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsQ0FBQTtRQUNoQyxNQUFNLENBQUMsS0FBSyxDQUFDLFdBQVcsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsTUFBTSx5QkFBeUIsU0FBUyxFQUFFLENBQUMsQ0FBQTtRQUN0RixPQUFPLFFBQVEsQ0FBQyxJQUFJLENBQUMsSUFBYSxDQUFBO0lBQ3BDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQyxtQ0FBbUMsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUN4RCxNQUFNLEtBQUssQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFFRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE1BQXlCO0lBQ3RELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FDbEIsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUNSLEtBQUssQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUM7UUFDMUIsS0FBSyxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQztRQUM5QixLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQzVCLENBQUM7QUFDSixDQUFDO0FBQ0QsTUFBTSxVQUFVLG9CQUFvQixDQUFDLE1BQXlCO0lBQzVELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQyxDQUFDLEtBQUssRUFBRSxFQUFFLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxNQUFNLEtBQUssQ0FBQyxDQUFDLENBQUM7QUFDOUQsQ0FBQztBQUVELE1BQU0sVUFBVSx3QkFBd0IsQ0FBQyxNQUF5QjtJQUNoRSxPQUFPLE1BQU0sQ0FBQyxNQUFNLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsVUFBVSxLQUFLLENBQUMsQ0FBQyxDQUFDO0FBQ2xFLENBQUM7QUFFRCxNQUFNLFVBQVUsbUJBQW1CLENBQUMsTUFBeUI7SUFDM0QsT0FBTyxNQUFNLENBQUMsTUFBTSxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQUMsQ0FBQztBQUM3RCxDQUFDO0FBQ0QsTUFBTSxVQUFVLG1CQUFtQixDQUFDLE1BQXlCO0lBQzNELE1BQU0sR0FBRyxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsRUFBRSxFQUFFLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxhQUFhLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUE7SUFDNUQsT0FBTyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUU7UUFDMUIsTUFBTSxNQUFNLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxRQUFRLENBQUMsTUFBTSxDQUFDLENBQUE7UUFDMUMsT0FBTyxHQUFHLEtBQUssQ0FBQyxFQUFFLE1BQU0sTUFBTSxDQUFDLENBQUMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFBO0lBQ3BELENBQUMsQ0FBQyxDQUFBO0FBQ0osQ0FBQyJ9 \ No newline at end of file diff --git a/packages/kbot/dist-in/src/models/cache/openai-models.ts b/packages/kbot/dist-in/src/models/cache/openai-models.ts new file mode 100644 index 00000000..e27598a4 --- /dev/null +++ b/packages/kbot/dist-in/src/models/cache/openai-models.ts @@ -0,0 +1,65 @@ +export enum E_OpenAI_Models { + GPT_4O_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-realtime-preview-2024-12-17", + GPT_4O_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-audio-preview-2024-12-17", + DALL_E_3 = "dall-e-3", + DALL_E_2 = "dall-e-2", + GPT_4O_AUDIO_PREVIEW_2024_10_01 = "gpt-4o-audio-preview-2024-10-01", + GPT_4O_MINI_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-mini-realtime-preview-2024-12-17", + GPT_4O_MINI_REALTIME_PREVIEW = "gpt-4o-mini-realtime-preview", + GPT_4O_REALTIME_PREVIEW_2024_10_01 = "gpt-4o-realtime-preview-2024-10-01", + GPT_4O_TRANSCRIBE = "gpt-4o-transcribe", + GPT_4O_MINI_TRANSCRIBE = "gpt-4o-mini-transcribe", + GPT_4O_REALTIME_PREVIEW = "gpt-4o-realtime-preview", + BABBAGE_002 = "babbage-002", + GPT_4O_MINI_TTS = "gpt-4o-mini-tts", + TTS_1_HD_1106 = "tts-1-hd-1106", + TEXT_EMBEDDING_3_LARGE = "text-embedding-3-large", + GPT_4 = "gpt-4", + TEXT_EMBEDDING_ADA_002 = "text-embedding-ada-002", + TTS_1_HD = "tts-1-hd", + GPT_4O_MINI_AUDIO_PREVIEW = "gpt-4o-mini-audio-preview", + GPT_4O_AUDIO_PREVIEW = "gpt-4o-audio-preview", + O1_PREVIEW_2024_09_12 
= "o1-preview-2024-09-12", + GPT_3_5_TURBO_INSTRUCT_0914 = "gpt-3.5-turbo-instruct-0914", + GPT_4O_MINI_SEARCH_PREVIEW = "gpt-4o-mini-search-preview", + TTS_1_1106 = "tts-1-1106", + DAVINCI_002 = "davinci-002", + GPT_3_5_TURBO_1106 = "gpt-3.5-turbo-1106", + GPT_4_TURBO = "gpt-4-turbo", + GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct", + GPT_3_5_TURBO = "gpt-3.5-turbo", + CHATGPT_4O_LATEST = "chatgpt-4o-latest", + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-mini-search-preview-2025-03-11", + GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", + WHISPER_1 = "whisper-1", + GPT_3_5_TURBO_0125 = "gpt-3.5-turbo-0125", + GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", + GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k", + GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-2024-04-09", + GPT_4_1106_PREVIEW = "gpt-4-1106-preview", + O1_PREVIEW = "o1-preview", + GPT_4_0613 = "gpt-4-0613", + GPT_4O_SEARCH_PREVIEW = "gpt-4o-search-preview", + GPT_4_5_PREVIEW = "gpt-4.5-preview", + GPT_4_5_PREVIEW_2025_02_27 = "gpt-4.5-preview-2025-02-27", + GPT_4O_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-search-preview-2025-03-11", + OMNI_MODERATION_LATEST = "omni-moderation-latest", + TTS_1 = "tts-1", + OMNI_MODERATION_2024_09_26 = "omni-moderation-2024-09-26", + TEXT_EMBEDDING_3_SMALL = "text-embedding-3-small", + GPT_4O = "gpt-4o", + GPT_4O_MINI = "gpt-4o-mini", + GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", + O3_MINI = "o3-mini", + GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", + GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview", + O3_MINI_2025_01_31 = "o3-mini-2025-01-31", + O1_MINI = "o1-mini", + GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-mini-audio-preview-2024-12-17", + O1_MINI_2024_09_12 = "o1-mini-2024-09-12", + GPT_4_0125_PREVIEW = "gpt-4-0125-preview", + O1 = "o1", + O1_2024_12_17 = "o1-2024-12-17", + O1_PRO = "o1-pro", + O1_PRO_2025_03_19 = "o1-pro-2025-03-19" +} \ No newline at end of file diff --git a/packages/kbot/dist-in/src/models/cache/openai.ts b/packages/kbot/dist-in/src/models/cache/openai.ts new file mode 100644 index 00000000..37ffe643 --- /dev/null +++ b/packages/kbot/dist-in/src/models/cache/openai.ts @@ -0,0 +1 @@ +export const models = 
[{"id":"gpt-4o-realtime-preview-2024-12-17","object":"model","created":1733945430,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-12-17","object":"model","created":1734034239,"owned_by":"system"},{"id":"dall-e-3","object":"model","created":1698785189,"owned_by":"system"},{"id":"dall-e-2","object":"model","created":1698798177,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-10-01","object":"model","created":1727389042,"owned_by":"system"},{"id":"o3-mini","object":"model","created":1737146383,"owned_by":"system"},{"id":"o3-mini-2025-01-31","object":"model","created":1738010200,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview-2024-12-17","object":"model","created":1734112601,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview","object":"model","created":1734387380,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-10-01","object":"model","created":1727131766,"owned_by":"system"},{"id":"gpt-4o-transcribe","object":"model","created":1742068463,"owned_by":"system"},{"id":"gpt-4o-mini-transcribe","object":"model","created":1742068596,"owned_by":"system"},{"id":"gpt-4o-realtime-preview","object":"model","created":1727659998,"owned_by":"system"},{"id":"babbage-002","object":"model","created":1692634615,"owned_by":"system"},{"id":"gpt-4o-mini-tts","object":"model","created":1742403959,"owned_by":"system"},{"id":"tts-1-hd-1106","object":"model","created":1699053533,"owned_by":"system"},{"id":"text-embedding-3-large","object":"model","created":1705953180,"owned_by":"system"},{"id":"gpt-4","object":"model","created":1687882411,"owned_by":"openai"},{"id":"text-embedding-ada-002","object":"model","created":1671217299,"owned_by":"openai-internal"},{"id":"tts-1-hd","object":"model","created":1699046015,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview","object":"model","created":1734387424,"owned_by":"system"},{"id":"gpt-4o-audio-preview","object":"model","created":1727460443,"owned_by":"system"},{"id":"o1-preview-2024-09-12","object":"model","created":1725648865,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct-0914","object":"model","created":1694122472,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview","object":"model","created":1741391161,"owned_by":"system"},{"id":"tts-1-1106","object":"model","created":1699053241,"owned_by":"system"},{"id":"davinci-002","object":"model","created":1692634301,"owned_by":"system"},{"id":"gpt-3.5-turbo-1106","object":"model","created":1698959748,"owned_by":"system"},{"id":"gpt-4-turbo","object":"model","created":1712361441,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct","object":"model","created":1692901427,"owned_by":"system"},{"id":"gpt-3.5-turbo","object":"model","created":1677610602,"owned_by":"openai"},{"id":"chatgpt-4o-latest","object":"model","created":1723515131,"owned_by":"system"},{"id":"gpt-4o-mini-search-preview-2025-03-11","object":"model","created":1741390858,"owned_by":"system"},{"id":"gpt-4o-2024-11-20","object":"model","created":1739331543,"owned_by":"system"},{"id":"whisper-1","object":"model","created":1677532384,"owned_by":"openai-internal"},{"id":"gpt-3.5-turbo-0125","object":"model","created":1706048358,"owned_by":"system"},{"id":"gpt-4o-2024-05-13","object":"model","created":1715368132,"owned_by":"system"},{"id":"gpt-3.5-turbo-16k","object":"model","created":1683758102,"owned_by":"openai-internal"},{"id":"gpt-4-turbo-2024-04-09","object":"model","created":1712601677,"owned_by":"system"},{"id":"gpt-4-1106-preview","object":"model","created":1698957206,"owned_by":"system"},{"id":"o1-preview","object":"model"
,"created":1725648897,"owned_by":"system"},{"id":"gpt-4-0613","object":"model","created":1686588896,"owned_by":"openai"},{"id":"gpt-4o-search-preview","object":"model","created":1741388720,"owned_by":"system"},{"id":"gpt-4.5-preview","object":"model","created":1740623059,"owned_by":"system"},{"id":"gpt-4.5-preview-2025-02-27","object":"model","created":1740623304,"owned_by":"system"},{"id":"gpt-4o-search-preview-2025-03-11","object":"model","created":1741388170,"owned_by":"system"},{"id":"omni-moderation-latest","object":"model","created":1731689265,"owned_by":"system"},{"id":"tts-1","object":"model","created":1681940951,"owned_by":"openai-internal"},{"id":"omni-moderation-2024-09-26","object":"model","created":1732734466,"owned_by":"system"},{"id":"text-embedding-3-small","object":"model","created":1705948997,"owned_by":"system"},{"id":"gpt-4o","object":"model","created":1715367049,"owned_by":"system"},{"id":"gpt-4o-mini","object":"model","created":1721172741,"owned_by":"system"},{"id":"gpt-4o-2024-08-06","object":"model","created":1722814719,"owned_by":"system"},{"id":"gpt-4o-mini-2024-07-18","object":"model","created":1721172717,"owned_by":"system"},{"id":"gpt-4-turbo-preview","object":"model","created":1706037777,"owned_by":"system"},{"id":"o1-mini","object":"model","created":1725649008,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview-2024-12-17","object":"model","created":1734115920,"owned_by":"system"},{"id":"o1-mini-2024-09-12","object":"model","created":1725648979,"owned_by":"system"},{"id":"gpt-4-0125-preview","object":"model","created":1706037612,"owned_by":"system"},{"id":"o1","object":"model","created":1734375816,"owned_by":"system"},{"id":"o1-2024-12-17","object":"model","created":1734326976,"owned_by":"system"},{"id":"o1-pro","object":"model","created":1742251791,"owned_by":"system"},{"id":"o1-pro-2025-03-19","object":"model","created":1742251504,"owned_by":"system"}] \ No newline at end of file diff --git a/packages/kbot/dist-in/src/models/cache/openrouter-models.ts b/packages/kbot/dist-in/src/models/cache/openrouter-models.ts new file mode 100644 index 00000000..6d38b0b6 --- /dev/null +++ b/packages/kbot/dist-in/src/models/cache/openrouter-models.ts @@ -0,0 +1,292 @@ +export enum E_OpenRouter_Models { + MISTRAL_MINISTRAL_8B = "mistral/ministral-8b", + DEEPSEEK_DEEPSEEK_V3_BASE_FREE = "deepseek/deepseek-v3-base:free", + SCB10X_LLAMA3_1_TYPHOON2_8B_INSTRUCT = "scb10x/llama3.1-typhoon2-8b-instruct", + SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT = "scb10x/llama3.1-typhoon2-70b-instruct", + ALLENAI_MOLMO_7B_D_FREE = "allenai/molmo-7b-d:free", + BYTEDANCE_RESEARCH_UI_TARS_72B_FREE = "bytedance-research/ui-tars-72b:free", + QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE = "qwen/qwen2.5-vl-3b-instruct:free", + GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE = "google/gemini-2.5-pro-exp-03-25:free", + QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", + DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", + DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = "deepseek/deepseek-chat-v3-0324", + FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free", + OPENAI_O1_PRO = "openai/o1-pro", + MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", + MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct", + OPEN_R1_OLYMPICCODER_7B_FREE = "open-r1/olympiccoder-7b:free", + OPEN_R1_OLYMPICCODER_32B_FREE = "open-r1/olympiccoder-32b:free", + STEELSKULL_L3_3_ELECTRA_R1_70B = "steelskull/l3.3-electra-r1-70b", + 
ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct", + GOOGLE_GEMMA_3_1B_IT_FREE = "google/gemma-3-1b-it:free", + GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", + GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it", + AI21_JAMBA_1_6_LARGE = "ai21/jamba-1.6-large", + AI21_JAMBA_1_6_MINI = "ai21/jamba-1.6-mini", + GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", + GOOGLE_GEMMA_3_12B_IT = "google/gemma-3-12b-it", + COHERE_COMMAND_A = "cohere/command-a", + OPENAI_GPT_4O_MINI_SEARCH_PREVIEW = "openai/gpt-4o-mini-search-preview", + OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview", + TOKYOTECH_LLM_LLAMA_3_1_SWALLOW_70B_INSTRUCT_V0_3 = "tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3", + REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free", + GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free", + GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it", + THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1", + LATITUDEGAMES_WAYFARER_LARGE_70B_LLAMA_3_3 = "latitudegames/wayfarer-large-70b-llama-3.3", + THEDRUMMER_SKYFALL_36B_V2 = "thedrummer/skyfall-36b-v2", + MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT = "microsoft/phi-4-multimodal-instruct", + PERPLEXITY_SONAR_REASONING_PRO = "perplexity/sonar-reasoning-pro", + PERPLEXITY_SONAR_PRO = "perplexity/sonar-pro", + PERPLEXITY_SONAR_DEEP_RESEARCH = "perplexity/sonar-deep-research", + DEEPSEEK_DEEPSEEK_R1_ZERO_FREE = "deepseek/deepseek-r1-zero:free", + QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free", + QWEN_QWQ_32B = "qwen/qwq-32b", + QWEN_QWEN2_5_32B_INSTRUCT = "qwen/qwen2.5-32b-instruct", + MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE = "moonshotai/moonlight-16b-a3b-instruct:free", + NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free", + OPENAI_GPT_4_5_PREVIEW = "openai/gpt-4.5-preview", + GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001", + ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta", + ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet", + ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking", + PERPLEXITY_R1_1776 = "perplexity/r1-1776", + MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba", + COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", + COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free", + META_LLAMA_LLAMA_GUARD_3_8B = "meta-llama/llama-guard-3-8b", + OPENAI_O3_MINI_HIGH = "openai/o3-mini-high", + DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B = "deepseek/deepseek-r1-distill-llama-8b", + GOOGLE_GEMINI_2_0_FLASH_001 = "google/gemini-2.0-flash-001", + GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE = "google/gemini-2.0-pro-exp-02-05:free", + QWEN_QWEN_VL_PLUS = "qwen/qwen-vl-plus", + AION_LABS_AION_1_0 = "aion-labs/aion-1.0", + AION_LABS_AION_1_0_MINI = "aion-labs/aion-1.0-mini", + AION_LABS_AION_RP_LLAMA_3_1_8B = "aion-labs/aion-rp-llama-3.1-8b", + QWEN_QWEN_VL_MAX = "qwen/qwen-vl-max", + QWEN_QWEN_TURBO = "qwen/qwen-turbo", + QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free", + QWEN_QWEN2_5_VL_72B_INSTRUCT = "qwen/qwen2.5-vl-72b-instruct", + QWEN_QWEN_PLUS = "qwen/qwen-plus", + QWEN_QWEN_MAX = "qwen/qwen-max", + OPENAI_O3_MINI = "openai/o3-mini", + DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B = "deepseek/deepseek-r1-distill-qwen-1.5b", + MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free", + 
MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501 = "mistralai/mistral-small-24b-instruct-2501", + DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE = "deepseek/deepseek-r1-distill-qwen-32b:free", + DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B = "deepseek/deepseek-r1-distill-qwen-32b", + DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free", + DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B = "deepseek/deepseek-r1-distill-qwen-14b", + PERPLEXITY_SONAR_REASONING = "perplexity/sonar-reasoning", + PERPLEXITY_SONAR = "perplexity/sonar", + LIQUID_LFM_7B = "liquid/lfm-7b", + LIQUID_LFM_3B = "liquid/lfm-3b", + DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free", + DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B = "deepseek/deepseek-r1-distill-llama-70b", + GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE = "google/gemini-2.0-flash-thinking-exp:free", + DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free", + DEEPSEEK_DEEPSEEK_R1 = "deepseek/deepseek-r1", + SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE = "sophosympatheia/rogue-rose-103b-v0.2:free", + MINIMAX_MINIMAX_01 = "minimax/minimax-01", + MISTRALAI_CODESTRAL_2501 = "mistralai/codestral-2501", + MICROSOFT_PHI_4 = "microsoft/phi-4", + SAO10K_L3_1_70B_HANAMI_X1 = "sao10k/l3.1-70b-hanami-x1", + DEEPSEEK_DEEPSEEK_CHAT_FREE = "deepseek/deepseek-chat:free", + DEEPSEEK_DEEPSEEK_CHAT = "deepseek/deepseek-chat", + GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE = "google/gemini-2.0-flash-thinking-exp-1219:free", + SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b", + OPENAI_O1 = "openai/o1", + EVA_UNIT_01_EVA_LLAMA_3_33_70B = "eva-unit-01/eva-llama-3.33-70b", + X_AI_GROK_2_VISION_1212 = "x-ai/grok-2-vision-1212", + X_AI_GROK_2_1212 = "x-ai/grok-2-1212", + COHERE_COMMAND_R7B_12_2024 = "cohere/command-r7b-12-2024", + GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", + META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", + META_LLAMA_LLAMA_3_3_70B_INSTRUCT = "meta-llama/llama-3.3-70b-instruct", + AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1", + AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1", + AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1", + QWEN_QWQ_32B_PREVIEW_FREE = "qwen/qwq-32b-preview:free", + QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview", + GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE = "google/learnlm-1.5-pro-experimental:free", + EVA_UNIT_01_EVA_QWEN_2_5_72B = "eva-unit-01/eva-qwen-2.5-72b", + OPENAI_GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20", + MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411", + MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407", + MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411", + X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta", + INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b", + QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", + QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct", + RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", + EVA_UNIT_01_EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b", + THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", + ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta", + ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", + ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta", + ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", + NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b", + ANTHRACITE_ORG_MAGNUM_V4_72B = 
"anthracite-org/magnum-v4-72b", + ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta", + ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", + X_AI_GROK_BETA = "x-ai/grok-beta", + MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", + MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", + QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", + NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE = "nvidia/llama-3.1-nemotron-70b-instruct:free", + NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", + INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", + INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", + GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", + THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", + ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", + LIQUID_LFM_40B = "liquid/lfm-40b", + META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", + META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", + META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", + META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", + QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", + QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", + QWEN_QWEN_2_5_VL_72B_INSTRUCT = "qwen/qwen-2.5-vl-72b-instruct", + NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", + OPENAI_O1_PREVIEW = "openai/o1-preview", + OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12", + OPENAI_O1_MINI = "openai/o1-mini", + OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", + MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", + COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", + COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", + QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", + QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", + SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", + GOOGLE_GEMINI_FLASH_1_5_8B_EXP = "google/gemini-flash-1.5-8b-exp", + AI21_JAMBA_1_5_MINI = "ai21/jamba-1-5-mini", + AI21_JAMBA_1_5_LARGE = "ai21/jamba-1-5-large", + MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", + NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b", + NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b", + OPENAI_CHATGPT_4O_LATEST = "openai/chatgpt-4o-latest", + SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", + AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b", + OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", + META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b", + PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE = "perplexity/llama-3.1-sonar-small-128k-online", + PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = "perplexity/llama-3.1-sonar-large-128k-online", + META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", + META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", + META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", + 
META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", + MISTRALAI_CODESTRAL_MAMBA = "mistralai/codestral-mamba", + MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", + MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", + OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", + OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", + QWEN_QWEN_2_7B_INSTRUCT_FREE = "qwen/qwen-2-7b-instruct:free", + QWEN_QWEN_2_7B_INSTRUCT = "qwen/qwen-2-7b-instruct", + GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it", + ALPINDALE_MAGNUM_72B = "alpindale/magnum-72b", + GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", + GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it", + 01_AI_YI_LARGE = "01-ai/yi-large", + AI21_JAMBA_INSTRUCT = "ai21/jamba-instruct", + ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA = "anthropic/claude-3.5-sonnet-20240620:beta", + ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620", + SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", + COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", + QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct", + MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", + MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", + MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", + NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE = "microsoft/phi-3-mini-128k-instruct:free", + MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", + MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE = "microsoft/phi-3-medium-128k-instruct:free", + MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", + NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", + GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5", + OPENAI_GPT_4O = "openai/gpt-4o", + OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended", + OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", + META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", + NEVERSLEEP_LLAMA_3_LUMIMAID_8B_EXTENDED = "neversleep/llama-3-lumimaid-8b:extended", + NEVERSLEEP_LLAMA_3_LUMIMAID_8B = "neversleep/llama-3-lumimaid-8b", + SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2", + META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE = "meta-llama/llama-3-8b-instruct:free", + META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct", + META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct", + MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct", + MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b", + MICROSOFT_WIZARDLM_2_7B = "microsoft/wizardlm-2-7b", + GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5", + OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", + COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", + COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024", + SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", + COHERE_COMMAND = "cohere/command", + COHERE_COMMAND_R = "cohere/command-r", + ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta", + ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", + ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta", + ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", + ANTHROPIC_CLAUDE_3_SONNET_BETA = "anthropic/claude-3-sonnet:beta", + ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet", + COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", + 
MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", + GOOGLE_GEMMA_7B_IT = "google/gemma-7b-it", + OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", + OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", + NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", + MISTRALAI_MISTRAL_MEDIUM = "mistralai/mistral-medium", + MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", + MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", + MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2", + COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X7B = "cognitivecomputations/dolphin-mixtral-8x7b", + GOOGLE_GEMINI_PRO_VISION = "google/gemini-pro-vision", + GOOGLE_GEMINI_PRO = "google/gemini-pro", + MISTRALAI_MIXTRAL_8X7B = "mistralai/mixtral-8x7b", + MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", + OPENCHAT_OPENCHAT_7B_FREE = "openchat/openchat-7b:free", + OPENCHAT_OPENCHAT_7B = "openchat/openchat-7b", + NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", + ANTHROPIC_CLAUDE_2_1_BETA = "anthropic/claude-2.1:beta", + ANTHROPIC_CLAUDE_2_1 = "anthropic/claude-2.1", + ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta", + ANTHROPIC_CLAUDE_2 = "anthropic/claude-2", + TEKNIUM_OPENHERMES_2_5_MISTRAL_7B = "teknium/openhermes-2.5-mistral-7b", + UNDI95_TOPPY_M_7B_FREE = "undi95/toppy-m-7b:free", + UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b", + ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", + OPENROUTER_AUTO = "openrouter/auto", + OPENAI_GPT_3_5_TURBO_1106 = "openai/gpt-3.5-turbo-1106", + OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview", + GOOGLE_PALM_2_CHAT_BISON_32K = "google/palm-2-chat-bison-32k", + GOOGLE_PALM_2_CODECHAT_BISON_32K = "google/palm-2-codechat-bison-32k", + JONDURBIN_AIROBOROS_L2_70B = "jondurbin/airoboros-l2-70b", + XWIN_LM_XWIN_LM_70B = "xwin-lm/xwin-lm-70b", + OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct", + MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", + PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b", + OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k", + OPENAI_GPT_4_32K = "openai/gpt-4-32k", + OPENAI_GPT_4_32K_0314 = "openai/gpt-4-32k-0314", + NOUSRESEARCH_NOUS_HERMES_LLAMA2_13B = "nousresearch/nous-hermes-llama2-13b", + MANCER_WEAVER = "mancer/weaver", + HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE = "huggingfaceh4/zephyr-7b-beta:free", + ANTHROPIC_CLAUDE_2_0_BETA = "anthropic/claude-2.0:beta", + ANTHROPIC_CLAUDE_2_0 = "anthropic/claude-2.0", + UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b", + GOOGLE_PALM_2_CHAT_BISON = "google/palm-2-chat-bison", + GOOGLE_PALM_2_CODECHAT_BISON = "google/palm-2-codechat-bison", + GRYPHE_MYTHOMAX_L2_13B_FREE = "gryphe/mythomax-l2-13b:free", + GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b", + META_LLAMA_LLAMA_2_13B_CHAT = "meta-llama/llama-2-13b-chat", + META_LLAMA_LLAMA_2_70B_CHAT = "meta-llama/llama-2-70b-chat", + OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo", + OPENAI_GPT_3_5_TURBO_0125 = "openai/gpt-3.5-turbo-0125", + OPENAI_GPT_4 = "openai/gpt-4", + OPENAI_GPT_4_0314 = "openai/gpt-4-0314" +} \ No newline at end of file diff --git a/packages/kbot/dist-in/src/models/cache/openrouter.ts b/packages/kbot/dist-in/src/models/cache/openrouter.ts new file mode 100644 index 00000000..e78875cc --- /dev/null +++ b/packages/kbot/dist-in/src/models/cache/openrouter.ts @@ -0,0 +1 @@ +export const models = [{"id":"mistral/ministral-8b","name":"Mistral: Ministral 
8b","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743430021,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3-base:free","name":"DeepSeek: DeepSeek V3 Base (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743272023,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"scb10x/llama3.1-typhoon2-8b-instruct","name":"Typhoon2 8B Instruct","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743196511,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"scb10x/llama3.1-typhoon2-70b-instruct","name":"Typhoon2 70B Instruct","pricing":{"prompt":"0.00000088","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743196170,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"allenai/molmo-7b-d:free","name":"AllenAI: Molmo 7B D (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743023247,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance-research/ui-tars-72b:free","name":"Bytedance: UI-TARS 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743020065,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-3b-instruct:free","name":"Qwen: Qwen2.5 VL 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1743014573,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-exp-03-25:free","name":"Google: Gemini Pro 2.5 Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742922099,"top_provider":{"context_length":1000000,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742824755,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: 
DeepSeek V3 0324","pricing":{"prompt":"0.00000027","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742824755,"top_provider":{"context_length":64000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"featherless/qwerky-72b:free","name":"Qwerky 72b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742481597,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742238937,"top_provider":{"context_length":96000,"max_completion_tokens":96000,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.0000001","completion":"0.0000003","request":"0","image":"0.0009264","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742238937,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"open-r1/olympiccoder-7b:free","name":"OlympicCoder 7B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742078265,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"open-r1/olympiccoder-32b:free","name":"OlympicCoder 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742077228,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"steelskull/l3.3-electra-r1-70b","name":"SteelSkull: L3.3 Electra R1 70B","pricing":{"prompt":"0.0000007","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1742067611,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.000001","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741988556,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-1b-it:free","name":"Google: Gemma 3 1B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741963556,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"ai21/jamba-1.6-large","name":"AI21: Jamba 1.6 Large","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741905173,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-1.6-mini","name":"AI21: Jamba Mini 1.6","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741905171,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3","name":"Swallow: Llama 3.1 Swallow 70B Instruct V0.3","pricing":{"prompt":"0.0000006","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741813936,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"rekaai/reka-flash-3:free","name":"Reka: Flash 3 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741812813,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0.0000256","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B V1","pricing":{"prompt":"0.0000008","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"latitudegames/wayfarer-large-70b-llama-3.3","name":"LatitudeGames: Wayfarer Large 70B Llama 3.3","pricing":{"prompt":"0.0000007","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741636885,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.0000005","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep 
Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741311246,"top_provider":{"context_length":200000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-zero:free","name":"DeepSeek: DeepSeek R1 Zero (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741297434,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741208814,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.00000012","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-32b-instruct","name":"Qwen: Qwen2.5 32B Instruct","pricing":{"prompt":"0.00000079","completion":"0.00000079","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1741042744,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"moonshotai/moonlight-16b-a3b-instruct:free","name":"Moonshot AI: Moonlight 16B A3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740719801,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.5-preview","name":"OpenAI: GPT-4.5 (Preview)","pricing":{"prompt":"0.000075","completion":"0.00015","request":"0","image":"0.108375","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740687810,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:beta","name":"Anthropic: Claude 3.7 Sonnet 
(self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1739401318,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 
8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini Flash 2.0","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738769413,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemini-2.0-pro-exp-02-05:free","name":"Google: Gemini Pro 2.0 Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738768044,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738410311,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B 
Instruct","pricing":{"prompt":"0.0000007","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738410311,"top_provider":{"context_length":128000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-qwen-1.5b","name":"DeepSeek: R1 Distill Qwen 1.5B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738328067,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000007","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b:free","name":"DeepSeek: R1 Distill Qwen 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738194830,"top_provider":{"context_length":16000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000012","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b:free","name":"DeepSeek: R1 Distill Qwen 14B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.00000023","completion":"0.00000069","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemini-2.0-flash-thinking-exp:free","name":"Google: Gemini 2.0 Flash Thinking Experimental 01-21 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737547899,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: 
R1","pricing":{"prompt":"0.00000055","completion":"0.00000219","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sophosympatheia/rogue-rose-103b-v0.2:free","name":"Rogue Rose 103B v0.2 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1737195189,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000007","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":8192,"is_moderated":false}},{"id":"sao10k/l3.1-70b-hanami-x1","name":"Sao10K: Llama 3.1 70B Hanami x1","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1736302854,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat:free","name":"DeepSeek: DeepSeek V3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1735241320,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.0000004","completion":"0.00000089","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"google/gemini-2.0-flash-thinking-exp-1219:free","name":"Google: Gemini 2.0 Flash Thinking Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734650026,"top_provider":{"context_length":40000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: 
o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"eva-unit-01/eva-llama-3.33-70b","name":"EVA Llama 3.33 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734377303,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"x-ai/grok-2-vision-1212","name":"xAI: Grok 2 Vision 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0.0036","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734237338,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-2-1212","name":"xAI: Grok 2 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734232814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini Flash 2.0 Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733506137,"top_provider":{"context_length":8000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":false}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":false}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 
1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":false}},{"id":"qwen/qwq-32b-preview:free","name":"Qwen: QwQ 32B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1732754541,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/learnlm-1.5-pro-experimental:free","name":"Google: LearnLM 1.5 Pro Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1732216551,"top_provider":{"context_length":40960,"max_completion_tokens":8192,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-72b","name":"EVA Qwen2.5 72B","pricing":{"prompt":"0.0000007","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1732210606,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-vision-beta","name":"xAI: Grok Vision Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.009","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731976624,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"infermatic/mn-inferor-12b","name":"Infermatic: Mistral Nemo Inferor 
12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731464428,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731368400,"top_provider":{"context_length":33000,"max_completion_tokens":3000,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-32b","name":"EVA Qwen2.5 32B","pricing":{"prompt":"0.0000026","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731104847,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"Unslopnemo 12B","pricing":{"prompt":"0.0000005","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1731103448,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku:beta","name":"Anthropic: Claude 3.5 Haiku (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku-20241022:beta","name":"Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku 
(2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"neversleep/llama-3.1-lumimaid-70b","name":"NeverSleep: Lumimaid v0.2 70B","pricing":{"prompt":"0.000003375","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.000001875","completion":"0.00000225","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":1024,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet:beta","name":"Anthropic: Claude 3.5 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"x-ai/grok-beta","name":"xAI: Grok Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729382400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B Instruct","pricing":{"prompt":"0.000000025","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1729036800,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct:free","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1728950400,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini Flash 1.5 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"Rocinante 12B","pricing":{"prompt":"0.00000025","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-40b","name":"Liquid: LFM 40B MoE","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":20000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B 
Instruct","pricing":{"prompt":"0.000000015","completion":"0.000000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct:free","name":"Meta: Llama 3.2 1B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000016","request":"0","image":"0.0051456","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct:free","name":"Meta: Llama 3.2 11B Vision Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000055","completion":"0.000000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1727222400,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000013","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726704000,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-72b-instruct","name":"Qwen: Qwen2.5-VL 72B Instruct","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0.000578","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726617600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 
8B","pricing":{"prompt":"0.0000001875","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"openai/o1-preview","name":"OpenAI: o1-preview","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/o1-preview-2024-09-12","name":"OpenAI: o1-preview (2024-09-12)","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.000002375","completion":"0.0000095","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.0000001425","completion":"0.00000057","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct:free","name":"Qwen: Qwen2.5-VL 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724803200,"top_provider":{"context_length":64000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 
70B v2.2","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724803200,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b-exp","name":"Google: Gemini Flash 1.5 8B Experimental","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724803200,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"ai21/jamba-1-5-mini","name":"AI21: Jamba 1.5 Mini","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724371200,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-1-5-large","name":"AI21: Jamba 1.5 Large","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724371200,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1723939200,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1723766400,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"aetherwiing/mn-starcannon-12b","name":"Aetherwiing: Starcannon 
12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1723507200,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nothingiisreal/mn-celeste-12b","name":"Mistral Nemo 12B Celeste","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1722556800,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-small-128k-online","name":"Perplexity: Llama 3.1 Sonar 8B Online","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-large-128k-online","name":"Perplexity: Llama 3.1 Sonar 70B Online","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct:free","name":"Meta: Llama 3.1 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B 
Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/codestral-mamba","name":"Mistral: Codestral Mamba","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721347200,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721347200,"top_provider":{"context_length":128000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.000000035","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"qwen/qwen-2-7b-instruct:free","name":"Qwen 2 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721088000,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen-2-7b-instruct","name":"Qwen 2 7B Instruct","pricing":{"prompt":"0.000000054","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1721088000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000027","completion":"0.00000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"alpindale/magnum-72b","name":"Magnum 72B","pricing":{"prompt":"0.000001875","completion":"0.00000225","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1720656000,"top_provider":{"context_length":16384,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"01-ai/yi-large","name":"01.AI: Yi Large","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1719273600,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-instruct","name":"AI21: Jamba Instruct","pricing":{"prompt":"0.0000005","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1719273600,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620:beta","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20) (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x22b","name":"Dolphin 2.9.2 Mixtral 8x22B 🐬","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1717804800,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2-72b-instruct","name":"Qwen 2 72B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1717718400,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716768000,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.00000003","completion":"0.000000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.00000003","completion":"0.000000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716768000,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct:free","name":"Microsoft: Phi-3 Mini 128K Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716681600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct:free","name":"Microsoft: Phi-3 Medium 128K Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716508800,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000003375","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini Flash 
1.5","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-8b:extended","name":"NeverSleep: Llama 3 Lumimaid 8B (extended)","pricing":{"prompt":"0.0000001875","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1714780800,"top_provider":{"context_length":24576,"max_completion_tokens":2048,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-8b","name":"NeverSleep: Llama 3 Lumimaid 8B","pricing":{"prompt":"0.0000001875","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1714780800,"top_provider":{"context_length":24576,"max_completion_tokens":2048,"is_moderated":false}},{"id":"sao10k/fimbulvetr-11b-v2","name":"Fimbulvetr 11B v2","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713657600,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct:free","name":"Meta: Llama 3 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B 
Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.00000023","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.0000005","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":8192,"is_moderated":false}},{"id":"microsoft/wizardlm-2-7b","name":"WizardLM-2 7B","pricing":{"prompt":"0.00000007","completion":"0.00000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1713225600,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini Pro 1.5","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-plus","name":"Cohere: Command R+","pricing":{"prompt":"0.00000285","completion":"0.00001425","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.00000285","completion":"0.00001425","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"sophosympatheia/midnight-rose-70b","name":"Midnight Rose 70B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1711065600,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command","name":"Cohere: 
Command","pricing":{"prompt":"0.00000095","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":false}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.000000475","completion":"0.000001425","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"anthropic/claude-3-haiku:beta","name":"Anthropic: Claude 3 Haiku (self-moderated)","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus:beta","name":"Anthropic: Claude 3 Opus (self-moderated)","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-sonnet:beta","name":"Anthropic: Claude 3 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-sonnet","name":"Anthropic: Claude 3 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.000000475","completion":"0.000001425","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":false}},{"id":"mistralai/mistral-large","name":"Mistral 
Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-7b-it","name":"Google: Gemma 7B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1708560000,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"nousresearch/nous-hermes-2-mixtral-8x7b-dpo","name":"Nous: Hermes 2 Mixtral 8x7B DPO","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1705363200,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"mistralai/mistral-medium","name":"Mistral Medium","pricing":{"prompt":"0.00000275","completion":"0.0000081","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x7b","name":"Dolphin 2.6 Mixtral 8x7B 🐬","pricing":{"prompt":"0.0000005","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1703116800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-pro-vision","name":"Google: Gemini Pro Vision 
1.0","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0.0025","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1702425600,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-pro","name":"Google: Gemini Pro 1.0","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0.0025","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1702425600,"top_provider":{"context_length":32760,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b","name":"Mistral: Mixtral 8x7B (base)","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000024","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openchat/openchat-7b:free","name":"OpenChat 3.5 7B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1701129600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openchat/openchat-7b","name":"OpenChat 3.5 7B","pricing":{"prompt":"0.000000055","completion":"0.000000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1701129600,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.0000015","completion":"0.00000225","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700956800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-2.1:beta","name":"Anthropic: Claude v2.1 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.1","name":"Anthropic: Claude v2.1","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-2:beta","name":"Anthropic: Claude v2 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2","name":"Anthropic: Claude 
v2","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"teknium/openhermes-2.5-mistral-7b","name":"OpenHermes 2.5 Mistral 7B","pricing":{"prompt":"0.00000017","completion":"0.00000017","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1700438400,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"undi95/toppy-m-7b:free","name":"Toppy M 7B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1699574400,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"undi95/toppy-m-7b","name":"Toppy M 7B","pricing":{"prompt":"0.00000007","completion":"0.00000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1699574400,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000009375","completion":"0.000009375","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":512,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1","request":"-1","image":"-1","web_search":"-1","internal_reasoning":"-1","input_cache_read":"-1","input_cache_write":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-1106","name":"OpenAI: GPT-3.5 Turbo 16k (older v1106)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1699228800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"google/palm-2-chat-bison-32k","name":"Google: PaLM 2 Chat 32k","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1698969600,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/palm-2-codechat-bison-32k","name":"Google: PaLM 2 Code Chat 32k","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1698969600,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"jondurbin/airoboros-l2-70b","name":"Airoboros 
70B","pricing":{"prompt":"0.0000005","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1698537600,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"xwin-lm/xwin-lm-70b","name":"Xwin 70B","pricing":{"prompt":"0.00000375","completion":"0.00000375","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1697328000,"top_provider":{"context_length":8192,"max_completion_tokens":512,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1695859200,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"pygmalionai/mythalion-13b","name":"Pygmalion: Mythalion 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1693612800,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-32k","name":"OpenAI: GPT-4 32k","pricing":{"prompt":"0.00006","completion":"0.00012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1693180800,"top_provider":{"context_length":32767,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-32k-0314","name":"OpenAI: GPT-4 32k (older v0314)","pricing":{"prompt":"0.00006","completion":"0.00012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1693180800,"top_provider":{"context_length":32767,"max_completion_tokens":4096,"is_moderated":true}},{"id":"nousresearch/nous-hermes-llama2-13b","name":"Nous: Hermes 13B","pricing":{"prompt":"0.00000017","completion":"0.00000017","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1692489600,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.0000015","completion":"0.00000225","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":1000,"is_moderated":false}},{"id":"huggingfaceh4/zephyr-7b-beta:free","name":"Hugging Face: Zephyr 7B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1690934400,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-2.0:beta","name":"Anthropic: Claude v2.0 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.0","name":"Anthropic: Claude v2.0","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1689984000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/palm-2-chat-bison","name":"Google: PaLM 2 Chat","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1689811200,"top_provider":{"context_length":9216,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/palm-2-codechat-bison","name":"Google: PaLM 2 Code Chat","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1689811200,"top_provider":{"context_length":7168,"max_completion_tokens":1024,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b:free","name":"MythoMax 13B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.000000065","completion":"0.000000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-2-13b-chat","name":"Meta: Llama 2 13B Chat","pricing":{"prompt":"0.00000022","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1687219200,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-2-70b-chat","name":"Meta: Llama 2 70B Chat","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1687219200,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 
Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-0125","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file diff --git a/packages/kbot/dist-in/zod_schema.js b/packages/kbot/dist-in/zod_schema.js index 826e60a7..e0a1806c 100644 --- a/packages/kbot/dist-in/zod_schema.js +++ b/packages/kbot/dist-in/zod_schema.js @@ -5,6 +5,7 @@ import env from 'env-var'; import { generate_interfaces, ZodMetaMap, resolve, write } from '@polymech/commons'; import { sync as writeFS } from '@polymech/fs/write'; import { isArray, isFunction, isString } from '@polymech/core/primitives'; +import { zodResponseFormat } from "openai/helpers/zod"; import { API_PREFIX, LOGGING_DIRECTORY, PREFERENCES_FILE_NAME } from './constants.js'; export const get_var = (key = '') => env.get(key).asString() || env.get(key.replace(/-/g, '_')).asString() || env.get(key.replace(/_/g, '-')).asString(); export const HOME = (sub = '') => path.join(process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME'] || '', sub); @@ -153,7 +154,12 @@ export const OptionsSchema = (opts) => { ]) .optional() .default(false) - .describe('Dry run - only write out parameters without making API calls')); + .describe('Dry run - only write out parameters without making API calls')) + .add('format', z.any() + .optional() + .default(null) + .describe('Zod schema for structured outputs') + .transform((val) => val ? 
zodResponseFormat(val, "format") : null)); return schemaMap.root() .passthrough() .describe('IKBotOptions');
@@ -167,4 +173,4 @@ export const schemas = () => {
     write([OptionsSchema()], 'schema.json', 'kbot', {});
     writeFS('schema_ui.json', schemaMap.getUISchema());
 };
-//# sourceMappingURL=data:application/json;base64,[inline source map for zod_schema.js omitted]
\ No newline at end of file
+//# sourceMappingURL=data:application/json;base64,[regenerated inline source map for zod_schema.js omitted]
\ No newline at
end of file diff --git a/packages/kbot/dist-in/zod_types.d.ts b/packages/kbot/dist-in/zod_types.d.ts index c8e55967..6fc0d2a4 100644 --- a/packages/kbot/dist-in/zod_types.d.ts +++ b/packages/kbot/dist-in/zod_types.d.ts @@ -34,6 +34,7 @@ export interface IKBotOptions { aion-labs/aion-1.0-mini | paid aion-labs/aion-rp-llama-3.1-8b | paid jondurbin/airoboros-l2-70b | paid + allenai/molmo-7b-d:free | free allenai/olmo-2-0325-32b-instruct | paid amazon/nova-lite-v1 | paid amazon/nova-micro-v1 | paid @@ -62,6 +63,7 @@ export interface IKBotOptions { anthropic/claude-2.1 | paid anthropic/claude-2.1:beta | paid openrouter/auto | paid + bytedance-research/ui-tars-72b:free | free cohere/command | paid cohere/command-a | paid cohere/command-r | paid @@ -74,6 +76,9 @@ export interface IKBotOptions { deepseek/deepseek-r1-zero:free | free deepseek/deepseek-chat | paid deepseek/deepseek-chat:free | free + deepseek/deepseek-chat-v3-0324 | paid + deepseek/deepseek-chat-v3-0324:free | free + deepseek/deepseek-v3-base:free | free deepseek/deepseek-r1 | paid deepseek/deepseek-r1:free | free deepseek/deepseek-r1-distill-llama-70b | paid @@ -96,24 +101,25 @@ export interface IKBotOptions { google/gemini-2.0-flash-lite-001 | paid google/gemini-2.0-flash-thinking-exp-1219:free | free google/gemini-2.0-flash-thinking-exp:free | free - google/gemini-exp-1206:free | free google/gemini-flash-1.5 | paid google/gemini-flash-1.5-8b | paid google/gemini-flash-1.5-8b-exp | paid google/gemini-2.0-flash-001 | paid google/gemini-2.0-flash-exp:free | free - google/gemini-2.0-flash-lite-preview-02-05:free | free google/gemini-pro | paid google/gemini-pro-1.5 | paid google/gemini-2.0-pro-exp-02-05:free | free + google/gemini-2.5-pro-exp-03-25:free | free google/gemini-pro-vision | paid google/gemma-2-27b-it | paid google/gemma-2-9b-it | paid google/gemma-2-9b-it:free | free + google/gemma-3-12b-it | paid google/gemma-3-12b-it:free | free google/gemma-3-1b-it:free | free google/gemma-3-27b-it | paid google/gemma-3-27b-it:free | free + google/gemma-3-4b-it | paid google/gemma-3-4b-it:free | free google/gemma-7b-it | paid google/learnlm-1.5-pro-experimental:free | free @@ -129,7 +135,6 @@ export interface IKBotOptions { liquid/lfm-3b | paid liquid/lfm-40b | paid liquid/lfm-7b | paid - allenai/llama-3.1-tulu-3-405b | paid meta-llama/llama-guard-3-8b | paid alpindale/magnum-72b | paid anthracite-org/magnum-v2-72b | paid @@ -174,6 +179,7 @@ export interface IKBotOptions { mistralai/codestral-2501 | paid mistralai/codestral-mamba | paid mistralai/ministral-3b | paid + mistral/ministral-8b | paid mistralai/ministral-8b | paid mistralai/mistral-7b-instruct | paid mistralai/mistral-7b-instruct:free | free @@ -185,6 +191,7 @@ export interface IKBotOptions { mistralai/mistral-small-24b-instruct-2501 | paid mistralai/mistral-small-24b-instruct-2501:free | free mistralai/mistral-small-3.1-24b-instruct | paid + mistralai/mistral-small-3.1-24b-instruct:free | free mistralai/mixtral-8x22b-instruct | paid mistralai/mixtral-8x7b | paid mistralai/mixtral-8x7b-instruct | paid @@ -239,14 +246,13 @@ export interface IKBotOptions { openai/o1-mini-2024-09-12 | paid openai/o1-preview | paid openai/o1-preview-2024-09-12 | paid + openai/o1-pro | paid openai/o3-mini | paid openai/o3-mini-high | paid openchat/openchat-7b | paid openchat/openchat-7b:free | free teknium/openhermes-2.5-mistral-7b | paid - perplexity/llama-3.1-sonar-large-128k-chat | paid perplexity/llama-3.1-sonar-large-128k-online | paid - perplexity/llama-3.1-sonar-small-128k-chat | paid 
perplexity/llama-3.1-sonar-small-128k-online | paid perplexity/r1-1776 | paid perplexity/sonar | paid @@ -264,10 +270,13 @@ export interface IKBotOptions { qwen/qwen-plus | paid qwen/qwen-turbo | paid qwen/qwen2.5-32b-instruct | paid + qwen/qwen2.5-vl-32b-instruct:free | free + qwen/qwen2.5-vl-3b-instruct:free | free qwen/qwen2.5-vl-72b-instruct | paid qwen/qwen2.5-vl-72b-instruct:free | free qwen/qwen-2.5-vl-72b-instruct | paid qwen/qwen-2.5-vl-7b-instruct | paid + qwen/qwen-2.5-vl-7b-instruct:free | free qwen/qwq-32b | paid qwen/qwq-32b:free | free qwen/qwq-32b-preview | paid @@ -277,6 +286,7 @@ export interface IKBotOptions { qwen/qwen-2.5-7b-instruct | paid qwen/qwen-2.5-coder-32b-instruct | paid qwen/qwen-2.5-coder-32b-instruct:free | free + featherless/qwerky-72b:free | free rekaai/reka-flash-3:free | free undi95/remm-slerp-l2-13b | paid thedrummer/rocinante-12b | paid @@ -293,6 +303,8 @@ export interface IKBotOptions { thedrummer/skyfall-36b-v2 | paid undi95/toppy-m-7b | paid undi95/toppy-m-7b:free | free + scb10x/llama3.1-typhoon2-70b-instruct | paid + scb10x/llama3.1-typhoon2-8b-instruct | paid thedrummer/unslopnemo-12b | paid microsoft/wizardlm-2-7b | paid microsoft/wizardlm-2-8x22b | paid @@ -339,17 +351,22 @@ export interface IKBotOptions { gpt-4o-mini-realtime-preview-2024-12-17 gpt-4o-mini-search-preview gpt-4o-mini-search-preview-2025-03-11 + gpt-4o-mini-transcribe + gpt-4o-mini-tts gpt-4o-realtime-preview gpt-4o-realtime-preview-2024-10-01 gpt-4o-realtime-preview-2024-12-17 gpt-4o-search-preview gpt-4o-search-preview-2025-03-11 + gpt-4o-transcribe o1 o1-2024-12-17 o1-mini o1-mini-2024-09-12 o1-preview o1-preview-2024-09-12 + o1-pro + o1-pro-2025-03-19 o3-mini o3-mini-2025-01-31 omni-moderation-2024-09-26 @@ -410,4 +427,6 @@ export interface IKBotOptions { filters?: (string | ("JSON" | "JSONUnescape" | "JSONPretty" | "AlphaSort" | "code" | "JSONParse" | "trim")[] | string[] | ((...args_0: unknown[]) => unknown)[]); /** Dry run - only write out parameters without making API calls */ dry?: (boolean | string); + /** Zod schema for structured outputs */ + format?: any; } diff --git a/packages/kbot/dist/3ab7e05d10164fc409c7.js b/packages/kbot/dist/3ab7e05d10164fc409c7.js new file mode 100644 index 00000000..b1699a32 --- /dev/null +++ b/packages/kbot/dist/3ab7e05d10164fc409c7.js @@ -0,0 +1,92 @@ +import chalk from 'chalk'; +import * as path from 'node:path'; +import { sync as read } from '@polymech/fs/read'; +import { sync as exists } from '@polymech/fs/exists'; +import { logger, module_root } from '../index.js'; +import { CACHE_PATH as OPENROUTER_CACHE_PATH, fetchOpenRouterModels, listModelsAsStrings as listOpenRouterModelsAsStrings } from './openrouter.js'; +import { CACHE_PATH as OPENAI_CACHE_PATH, listModelsAsStrings as listOpenAIModelsAsStrings } from './openai.js'; +import { fetchOpenAIModels } from '../models/openai.js'; +import { CONFIG_DEFAULT } from '@polymech/commons'; +import { models as OpenAIModels } from './cache/openai.js'; +import { models as OpenRouterModels } from './cache/openrouter.js'; +export const models_dist = () => { + let or_models = OpenRouterModels; + let oai_models = OpenAIModels; + let deepseek_models = [ + { + "id": "deepseek-chat", + "name": "deepseek-chat" + }, + { + "id": "deepseek-reasoner", + "name": "deepseek-reasoner" + }, + ]; + const modelsOpenAIPath = path.resolve(module_root(), 'openai.json'); + if (exists(modelsOpenAIPath)) { + oai_models = read(modelsOpenAIPath, 'json'); + } + const modelsRouterPath = path.resolve(module_root(), 
'openrouter.json');
+    if (exists(modelsRouterPath)) {
+        or_models = read(modelsRouterPath, 'json');
+    }
+    const models = [];
+    models.push(chalk.magenta.bold('\n OpenRouter models:\n'));
+    models.push(...listOpenRouterModelsAsStrings(or_models));
+    models.push(chalk.magenta.bold('\n OpenAI models:\n'));
+    models.push(...listOpenAIModelsAsStrings(oai_models));
+    models.push('-----\n');
+    models.push(chalk.magenta.bold('\n Deepseek models:\n'));
+    models.push(...listOpenAIModelsAsStrings(deepseek_models));
+    models.push('-----\n');
+    return models;
+};
+export const models = () => {
+    const models = [];
+    const openRouterPath = path.resolve(OPENROUTER_CACHE_PATH);
+    if (!exists(openRouterPath)) {
+        fetchOpenRouterModels();
+    }
+    if (exists(openRouterPath)) {
+        const modelData = read(openRouterPath, 'json');
+        models.push(chalk.magenta.bold('\n OpenRouter models:\n'));
+        models.push(...listOpenRouterModelsAsStrings(modelData.models));
+    }
+    logger.debug('Openrouter models cache: ', OPENROUTER_CACHE_PATH);
+    const openAIPath = path.resolve(OPENAI_CACHE_PATH);
+    const config = CONFIG_DEFAULT();
+    if (!exists(openAIPath) && config?.openai?.key) {
+        fetchOpenAIModels(config.openai.key);
+    }
+    if (exists(openAIPath)) {
+        const modelData = read(openAIPath, 'json');
+        models.push(chalk.magenta.bold('\n OpenAI models:\n'));
+        models.push(...listOpenAIModelsAsStrings(modelData.models));
+    }
+    logger.debug('OpenAI models cache: ', OPENAI_CACHE_PATH);
+    models.push('-----\n');
+    return models;
+};
+export const all = () => {
+    let models = [];
+    const openRouterPath = path.resolve(OPENROUTER_CACHE_PATH);
+    if (!exists(openRouterPath)) {
+        fetchOpenRouterModels();
+    }
+    if (exists(openRouterPath)) {
+        const modelData = read(openRouterPath, 'json');
+        models = models.concat(modelData.models);
+    }
+    const openAIPath = path.resolve(OPENAI_CACHE_PATH);
+    const config = CONFIG_DEFAULT();
+    if (!exists(openAIPath) && config?.openai?.key) {
+        fetchOpenAIModels(config.openai.key);
+    }
+    if (exists(openAIPath)) {
+        const modelData = read(openAIPath, 'json');
+        models.push(chalk.magenta.bold('\n OpenAI models:\n'));
+        models = models.concat(modelData.models);
+    }
+    return models;
+};
+//#
sourceMappingURL=data:application/json;base64,[inline source map for models/index.js omitted]
\ No newline at end of file
diff --git a/packages/kbot/dist/main_node.js b/packages/kbot/dist/main_node.js
index c096cbba..36917ab6 100644
--- a/packages/kbot/dist/main_node.js
+++ b/packages/kbot/dist/main_node.js
@@ -124707,7 +124707,7 @@ exports.SourceMapConsumer = __webpack_require__(38243).SourceMapConsumer;
 /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
 "use strict";
-module.exports = __webpack_require__.p + "a12ab9f9e1a5d45e0de8.js";
+module.exports = __webpack_require__.p + "3ab7e05d10164fc409c7.js";
 /***/ }),
@@ -133430,7 +133430,7 @@ const imports_json = {
     parse: JSON.parse,
     serialize: JSON.stringify
 };
-
+//# sourceMappingURL=data:application/json;base64,[inline source map for imports.ts omitted]
 ;// ../fs/dist/read.js
@@ -133519,7 +133519,7 @@ function read_async(path, returnAs) {
         .catch((err) => (err.code === 'ENOENT' ?
resolve(null) : reject(err)));
     });
 }
-
+//# sourceMappingURL=data:application/json;base64,[inline source map for read.ts omitted]
 ;// ../fs/dist/interfaces.js
 /////////////////////////////////////////////////////////
 //
@@ -133684,7 +133684,7 @@ var interfaces_EDeleteFlags;
 (function (EDeleteFlags) {
     EDeleteFlags[EDeleteFlags["REPORT"] = 16] = "REPORT";
 })(interfaces_EDeleteFlags || (interfaces_EDeleteFlags = {}));
-
+//# sourceMappingURL=data:application/json;base64,[inline source map for interfaces.ts omitted]
 ;// ../fs/dist/exists.js
@@ -133741,7 +133741,7 @@ function exists_async(path) {
         });
     });
 }
-
+//# sourceMappingURL=data:application/json;base64,[inline source map for exists.ts omitted]
 ;// ../core/dist/primitives.js
 /*---------------------------------------------------------------------------------------------
  *  Copyright (c) Microsoft Corporation. All rights reserved.
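// The @polymech/fs hunks in this file only re-attach inline source maps; the helpers
// themselves are unchanged. Their contract shows in the context lines: read_async()
// resolves null when a file is missing (ENOENT) instead of rejecting, and the sync
// read()/exists() pair is what the model-cache code in 3ab7e05d10164fc409c7.js relies on.
// A minimal sketch of that guard pattern; the cache file name here is illustrative only,
// not a real kbot path.
import * as path from 'node:path';
import { sync as read } from '@polymech/fs/read';
import { sync as exists } from '@polymech/fs/exists';

const cachePath = path.resolve(process.cwd(), 'models-cache.json'); // hypothetical location

export const loadModelCache = () => {
    // exists() guards the read so a missing cache falls back to a default instead of
    // surfacing ENOENT, mirroring the openai.json / openrouter.json handling earlier
    // in this diff.
    if (exists(cachePath)) {
        return read(cachePath, 'json');
    }
    return { models: [] };
};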
@@ -148895,7 +148895,7 @@ function _try(f, thisContext) {
         }
     });
 }
-
+//# sourceMappingURL=data:application/json;base64,[inline source map for promisify.ts omitted]
 ;// ../fs/dist/write.js
@@ -148979,7 +148979,7 @@ function write_async(path, data, options) {
     const processedData = toJson(data, opts.jsonIndent);
     return (opts.atomic ?
promisedAtomic : writeFileAsync)(path, processedData, { mode: opts.mode }); } - +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoid3JpdGUuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvd3JpdGUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLFFBQVEsTUFBTSxNQUFNLENBQUE7QUFDaEMsT0FBTyxLQUFLLEVBQUUsTUFBTSxJQUFJLENBQUE7QUFDeEIsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLElBQUksQ0FBQTtBQUNsQyxPQUFPLEVBQUUsSUFBSSxJQUFJLE1BQU0sRUFBRSxNQUFNLFFBQVEsQ0FBQTtBQUN2QyxPQUFPLEVBQUUsSUFBSSxFQUFFLElBQUksRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUV6QyxPQUFPLEVBQUUsZ0JBQWdCLEVBQUUsZUFBZSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDdkUsT0FBTyxFQUFFLFNBQVMsRUFBRSxNQUFNLGdCQUFnQixDQUFBO0FBQzFDLE1BQU0sTUFBTSxHQUFHLFVBQVUsQ0FBQTtBQUV6QixNQUFNLFVBQVUsYUFBYSxDQUFDLFVBQWtCLEVBQUUsSUFBWSxFQUFFLElBQXVCLEVBQUUsT0FBc0I7SUFDOUcsTUFBTSxlQUFlLEdBQUcsVUFBVSxHQUFHLHlCQUF5QixDQUFDO0lBQy9ELGdCQUFnQixDQUFDLGVBQWUsRUFBRSxNQUFNLEVBQUUsSUFBSSxFQUFFLENBQUMsUUFBUSxDQUFDLENBQUMsQ0FBQztJQUM1RCxnQkFBZ0IsQ0FBQyxlQUFlLEVBQUUsTUFBTSxFQUFFLElBQUksRUFBRSxDQUFDLFFBQVEsRUFBRSxRQUFRLEVBQUUsUUFBUSxFQUFFLE9BQU8sQ0FBQyxDQUFDLENBQUM7SUFDekYsZUFBZSxDQUFDLGVBQWUsRUFBRSxTQUFTLEVBQUUsT0FBTyxFQUFFO1FBQ3BELE1BQU0sRUFBRSxDQUFDLFNBQVMsQ0FBQztRQUNuQixVQUFVLEVBQUUsQ0FBQyxRQUFRLENBQUM7UUFDdEIsUUFBUSxFQUFFLENBQUMsVUFBVSxDQUFDO0tBQ3RCLENBQUMsQ0FBQztBQUNKLENBQUM7QUFFRCxNQUFNLE1BQU0sR0FBRyxDQUFDLElBQXVCLEVBQUUsVUFBa0IsRUFBVSxFQUFFO0lBQ3RFLElBQUksT0FBTyxJQUFJLEtBQUssUUFBUTtXQUN4QixDQUFDLE1BQU0sQ0FBQyxRQUFRLENBQUMsSUFBSSxDQUFDO1dBQ3RCLElBQUksS0FBSyxJQUFJLEVBQUUsQ0FBQztRQUNuQixPQUFPLElBQUksQ0FBQyxTQUFTLENBQUMsSUFBSSxFQUFFLElBQUksRUFBRSxPQUFPLFVBQVUsS0FBSyxRQUFRLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsVUFBVSxDQUFDLENBQUM7SUFDcEYsQ0FBQztJQUNELE9BQU8sSUFBYyxDQUFDO0FBQ3ZCLENBQUMsQ0FBQztBQUVGLDREQUE0RDtBQUM1RCxPQUFPO0FBQ1AsNERBQTREO0FBQzVELE1BQU0sY0FBYyxHQUFHLENBQUMsSUFBWSxFQUFFLElBQWtCLEVBQUUsT0FBdUIsRUFBUSxFQUFFO0lBQzFGLElBQUksQ0FBQztRQUNKLGFBQWEsQ0FBQyxJQUFJLEVBQUUsSUFBSSxFQUFFLE9BQU8sQ0FBQyxDQUFDO0lBQ3BDLENBQUM7SUFBQyxPQUFPLEdBQUcsRUFBRSxDQUFDO1FBQ2QsSUFBSSxHQUFHLENBQUMsSUFBSSxLQUFLLFFBQVEsRUFBRSxDQUFDO1lBQzNCLG9FQUFvRTtZQUNwRSxNQUFNLENBQUMsUUFBUSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFBO1lBQzlCLEVBQUUsQ0FBQyxhQUFhLENBQUMsSUFBSSxFQUFFLElBQUksRUFBRSxPQUFPLENBQUMsQ0FBQztRQUN2QyxDQUFDO2FBQU0sQ0FBQztZQUNQLE1BQU0sR0FBRyxDQUFDO1FBQ1gsQ0FBQztJQUNGLENBQUM7QUFDRixDQUFDLENBQUM7QUFFRixNQUFNLGVBQWUsR0FBRyxDQUFDLElBQVksRUFBRSxJQUFZLEVBQUUsT0FBdUIsRUFBUSxFQUFFO0lBQ3JGLE9BQU8sSUFBSSxDQUFDLFlBQVksQ0FBQyxJQUFJLEdBQUcsTUFBTSxFQUFFLElBQUksRUFBRSxPQUFPLEVBQUUsY0FBYyxDQUFDLENBQUMsQ0FBQztBQUN6RSxDQUFDLENBQUM7QUFFRixNQUFNLFVBQVUsSUFBSSxDQUFDLElBQVksRUFBRSxJQUF1QixFQUFFLE9BQXVCO0lBQ2xGLE1BQU0sSUFBSSxHQUFRLE9BQU8sSUFBSSxFQUFFLENBQUM7SUFDaEMsTUFBTSxhQUFhLEdBQUcsTUFBTSxDQUFDLElBQUksRUFBRSxJQUFJLENBQUMsVUFBVSxDQUFDLENBQUM7SUFDcEQsTUFBTSxhQUFhLEdBQUcsSUFBSSxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUMsZUFBZSxDQUFDLENBQUMsQ0FBQyxjQUFjLENBQUM7SUFDckUsYUFBYSxDQUFDLElBQUksRUFBRSxhQUFhLEVBQUUsRUFBRSxJQUFJLEVBQUUsSUFBSSxDQUFDLElBQUksRUFBRSxDQUFDLENBQUM7QUFDekQsQ0FBQztBQUVELDREQUE0RDtBQUM1RCxRQUFRO0FBQ1IsNERBQTREO0FBQzVELE1BQU0saUJBQWlCLEdBQUcsRUFBRSxDQUFFLFFBQVEsQ0FBQyxTQUFTLENBQUE7QUFDaEQsTUFBTSxjQUFjLEdBQUcsU0FBUyxDQUFDLGVBQWUsQ0FBQyxDQUFBO0FBRWpELFNBQVMsY0FBYyxDQUFDLElBQVksRUFBRSxJQUFZLEVBQUUsT0FBdUI7SUFDMUUsT0FBTyxJQUFJLE9BQU8sQ0FBTyxDQUFDLE9BQU8sRUFBRSxNQUFNLEVBQUUsRUFBRTtRQUM1QyxpQkFBaUIsQ0FBQyxJQUFJLEVBQUUsSUFBSSxFQUFFLE9BQU8sQ0FBQzthQUNwQyxJQUFJLENBQUMsT0FBTyxDQUFDO2FBQ2IsS0FBSyxDQUFDLENBQUMsR0FBUSxFQUFFLEVBQUU7WUFDbkIsa0RBQWtEO1lBQ2xELDREQUE0RDtZQUM1RCxJQUFJLEdBQUcsQ0FBQyxJQUFJLEtBQUss
UUFBUSxFQUFFLENBQUM7Z0JBQzNCLDhEQUE4RDtnQkFDOUQsTUFBTSxDQUFDLFFBQVEsQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQTtnQkFDOUIsaUJBQWlCLENBQUMsSUFBSSxFQUFFLElBQUksRUFBRSxPQUFPLENBQUMsQ0FBQTtnQkFDdEMsT0FBTyxFQUFFLENBQUE7WUFDVixDQUFDO2lCQUFNLENBQUM7Z0JBQ1AsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFDO1lBQ2IsQ0FBQztRQUNGLENBQUMsQ0FBQyxDQUFDO0lBQ0wsQ0FBQyxDQUFDLENBQUM7QUFDSixDQUFDO0FBQ0QsTUFBTSxVQUFVLEtBQUssQ0FBQyxJQUFZLEVBQUUsSUFBdUIsRUFBRSxPQUF1QjtJQUNuRixNQUFNLElBQUksR0FBUSxPQUFPLElBQUksRUFBRSxDQUFDO0lBQ2hDLE1BQU0sYUFBYSxHQUFXLE1BQU0sQ0FBQyxJQUFJLEVBQUUsSUFBSSxDQUFDLFVBQVUsQ0FBQyxDQUFDO0lBQzVELE9BQU8sQ0FBQyxJQUFJLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQyxjQUFjLENBQUMsQ0FBQyxDQUFDLGNBQWMsQ0FBQyxDQUFDLElBQUksRUFBRSxhQUFhLEVBQUUsRUFBRSxJQUFJLEVBQUUsSUFBSSxDQUFDLElBQUksRUFBRSxDQUFDLENBQUE7QUFDakcsQ0FBQyJ9 ;// external "typescript" const external_typescript_namespaceObject = require("typescript"); ;// ../commons/node_modules/zod-to-ts/dist/index.js @@ -156703,6 +156703,5592 @@ var zod_lib_z = /*#__PURE__*/Object.freeze({ var source = __webpack_require__(55248); // EXTERNAL MODULE: ./node_modules/env-var/env-var.js var env_var_env_var = __webpack_require__(87151); +;// ./node_modules/openai/version.mjs +const VERSION = '4.91.0'; // x-release-please-version +//# sourceMappingURL=version.mjs.map +;// ./node_modules/openai/_shims/registry.mjs +let auto = false; +let kind = undefined; +let registry_fetch = undefined; +let registry_Request = (/* unused pure expression or super */ null && (undefined)); +let registry_Response = (/* unused pure expression or super */ null && (undefined)); +let Headers = (/* unused pure expression or super */ null && (undefined)); +let registry_FormData = undefined; +let registry_Blob = (/* unused pure expression or super */ null && (undefined)); +let File = undefined; +let registry_ReadableStream = undefined; +let registry_getMultipartRequestOptions = undefined; +let getDefaultAgent = undefined; +let fileFromPath = undefined; +let isFsReadStream = undefined; +function setShims(shims, options = { auto: false }) { + if (auto) { + throw new Error(`you must \`import 'openai/shims/${shims.kind}'\` before importing anything else from openai`); + } + if (kind) { + throw new Error(`can't \`import 'openai/shims/${shims.kind}'\` after \`import 'openai/shims/${kind}'\``); + } + auto = options.auto; + kind = shims.kind; + registry_fetch = shims.fetch; + registry_Request = shims.Request; + registry_Response = shims.Response; + Headers = shims.Headers; + registry_FormData = shims.FormData; + registry_Blob = shims.Blob; + File = shims.File; + registry_ReadableStream = shims.ReadableStream; + registry_getMultipartRequestOptions = shims.getMultipartRequestOptions; + getDefaultAgent = shims.getDefaultAgent; + fileFromPath = shims.fileFromPath; + isFsReadStream = shims.isFsReadStream; +} +//# sourceMappingURL=registry.mjs.map +// EXTERNAL MODULE: external "stream" +var external_stream_ = __webpack_require__(2203); +// EXTERNAL MODULE: external "http" +var external_http_ = __webpack_require__(58611); +// EXTERNAL MODULE: ./node_modules/whatwg-url/lib/public-api.js +var public_api = __webpack_require__(53417); +// EXTERNAL MODULE: external "https" +var external_https_ = __webpack_require__(65692); +// EXTERNAL MODULE: external "zlib" +var external_zlib_ = __webpack_require__(43106); +;// ./node_modules/node-fetch/lib/index.mjs + + + + + + + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const 
lib_Readable = external_stream_.Readable; + +const lib_BUFFER = Symbol('buffer'); +const lib_TYPE = Symbol('type'); + +class lib_Blob { + constructor() { + this[lib_TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof lib_Blob) { + buffer = element[lib_BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[lib_BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[lib_TYPE] = type; + } + } + get size() { + return this[lib_BUFFER].length; + } + get type() { + return this[lib_TYPE]; + } + text() { + return Promise.resolve(this[lib_BUFFER].toString()); + } + arrayBuffer() { + const buf = this[lib_BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new lib_Readable(); + readable._read = function () {}; + readable.push(this[lib_BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[lib_BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new lib_Blob([], { type: arguments[2] }); + blob[lib_BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(lib_Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(lib_Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; 
+FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = external_stream_.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof external_stream_) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof external_stream_) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new lib_Blob([], { + type: ct.toLowerCase() + }), { + [lib_BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In 
browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. + * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof external_stream_)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 
'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof lib_Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +lib_Headers.prototype.entries = lib_Headers.prototype[Symbol.iterator]; + +Object.defineProperty(lib_Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(lib_Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new lib_Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = external_http_.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class lib_Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new lib_Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new lib_Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(lib_Response.prototype); + +Object.defineProperties(lib_Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(lib_Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const lib_URL = external_url_.URL || public_api.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = external_url_.parse; +const format_url = external_url_.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new lib_URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = "destroy" in external_stream_.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class lib_Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new lib_Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new lib_Request(this); + } +} + +Body.mixIn(lib_Request.prototype); + +Object.defineProperty(lib_Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(lib_Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new lib_Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof external_stream_.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = external_url_.URL || public_api.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = external_stream_.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function lib_fetch(url, opts) { + + // allow custom promise + if (!lib_fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = lib_fetch.Promise; + + // wrap http.request into fetch + return new lib_fetch.Promise(function (resolve, reject) { + // build request object + const request = new lib_Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? external_https_ : external_http_).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof external_stream_.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (lib_fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new lib_Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(lib_fetch(new lib_Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new lib_Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: external_zlib_.Z_SYNC_FLUSH, + finishFlush: external_zlib_.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(external_zlib_.createGunzip(zlibOptions)); + response = new lib_Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(external_zlib_.createInflate()); + } else { + body = body.pipe(external_zlib_.createInflateRaw()); + } + response = new lib_Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new lib_Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof external_zlib_.createBrotliDecompress === 'function') { + body = body.pipe(external_zlib_.createBrotliDecompress()); + response = new lib_Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new lib_Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +lib_fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +lib_fetch.Promise = global.Promise; + +/* harmony default export */ const node_fetch_lib = (lib_fetch); + + +// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/File.js +var esm_File = __webpack_require__(71831); +// EXTERNAL MODULE: 
./node_modules/formdata-node/lib/esm/isFile.js +var esm_isFile = __webpack_require__(80699); +// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/Blob.js + 2 modules +var esm_Blob = __webpack_require__(89239); +;// ./node_modules/formdata-node/lib/esm/isBlob.js + +const isBlob_isBlob = (value) => value instanceof esm_Blob/* Blob */.Y; + +// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/isFunction.js +var esm_isFunction = __webpack_require__(52937); +;// ./node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js + +const deprecateConstructorEntries = (0,external_util_.deprecate)(() => { }, "Constructor \"entries\" argument is not spec-compliant " + + "and will be removed in next major release."); + +;// ./node_modules/formdata-node/lib/esm/FormData.js +var FormData_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _FormData_instances, _FormData_entries, _FormData_setEntry; + + + + + + +class FormData_FormData { + constructor(entries) { + _FormData_instances.add(this); + _FormData_entries.set(this, new Map()); + if (entries) { + deprecateConstructorEntries(); + entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName)); + } + } + static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) { + return Boolean(value + && (0,esm_isFunction/* isFunction */.T)(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && (0,esm_isFunction/* isFunction */.T)(value.append) + && (0,esm_isFunction/* isFunction */.T)(value.set) + && (0,esm_isFunction/* isFunction */.T)(value.get) + && (0,esm_isFunction/* isFunction */.T)(value.getAll) + && (0,esm_isFunction/* isFunction */.T)(value.has) + && (0,esm_isFunction/* isFunction */.T)(value.delete) + && (0,esm_isFunction/* isFunction */.T)(value.entries) + && (0,esm_isFunction/* isFunction */.T)(value.values) + && (0,esm_isFunction/* isFunction */.T)(value.keys) + && (0,esm_isFunction/* isFunction */.T)(value[Symbol.iterator]) + && (0,esm_isFunction/* isFunction */.T)(value.forEach)); + } + append(name, value, fileName) { + FormData_classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: true, + rawValue: value, + argsLength: arguments.length + }); + } + set(name, value, fileName) { + FormData_classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { + name, + fileName, + append: false, + rawValue: value, + argsLength: arguments.length + }); + } + get(name) { + const field = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return null; + } + return field[0]; + } + getAll(name) { + const field = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); + if (!field) { + return []; + } + return field.slice(); + } + has(name) { + return FormData_classPrivateFieldGet(this, _FormData_entries, "f").has(String(name)); + } + delete(name) { + FormData_classPrivateFieldGet(this, _FormData_entries, "f").delete(String(name)); + } + *keys() { + for (const key of 
FormData_classPrivateFieldGet(this, _FormData_entries, "f").keys()) { + yield key; + } + } + *entries() { + for (const name of this.keys()) { + const values = this.getAll(name); + for (const value of values) { + yield [name, value]; + } + } + } + *values() { + for (const [, value] of this) { + yield value; + } + } + [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) { + const methodName = append ? "append" : "set"; + if (argsLength < 2) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + `2 arguments required, but only ${argsLength} present.`); + } + name = String(name); + let value; + if ((0,esm_isFile/* isFile */.f)(rawValue)) { + value = fileName === undefined + ? rawValue + : new esm_File/* File */.Z([rawValue], fileName, { + type: rawValue.type, + lastModified: rawValue.lastModified + }); + } + else if (isBlob_isBlob(rawValue)) { + value = new esm_File/* File */.Z([rawValue], fileName === undefined ? "blob" : fileName, { + type: rawValue.type + }); + } + else if (fileName) { + throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` + + "parameter 2 is not of type 'Blob'."); + } + else { + value = String(rawValue); + } + const values = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(name); + if (!values) { + return void FormData_classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + if (!append) { + return void FormData_classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); + } + values.push(value); + }, Symbol.iterator)]() { + return this.entries(); + } + forEach(callback, thisArg) { + for (const [name, value] of this) { + callback.call(thisArg, value, name, this); + } + } + get [Symbol.toStringTag]() { + return "FormData"; + } + [external_util_.inspect.custom]() { + return this[Symbol.toStringTag]; + } +} + +;// ./node_modules/formdata-node/lib/esm/index.js + + + + +// EXTERNAL MODULE: ./node_modules/agentkeepalive/index.js +var agentkeepalive = __webpack_require__(95692); +// EXTERNAL MODULE: ./node_modules/abort-controller/dist/abort-controller.js +var abort_controller = __webpack_require__(66584); +;// ./node_modules/form-data-encoder/lib/esm/util/createBoundary.js +const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"; +function createBoundary() { + let size = 16; + let res = ""; + while (size--) { + res += alphabet[(Math.random() * alphabet.length) << 0]; + } + return res; +} +/* harmony default export */ const util_createBoundary = (createBoundary); + +;// ./node_modules/form-data-encoder/lib/esm/util/isPlainObject.js +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} +/* harmony default export */ const util_isPlainObject = (isPlainObject); + +;// ./node_modules/form-data-encoder/lib/esm/util/normalizeValue.js +const normalizeValue = (value) => String(value) + .replace(/\r|\n/g, (match, i, str) => { + if ((match === "\r" && str[i + 1] !== "\n") + || (match === "\n" && str[i - 1] !== "\r")) { + return "\r\n"; + } + return match; +}); +/* harmony default export */ const util_normalizeValue = (normalizeValue); + +;// ./node_modules/form-data-encoder/lib/esm/util/escapeName.js +const escapeName = (name) 
=> String(name) + .replace(/\r/g, "%0D") + .replace(/\n/g, "%0A") + .replace(/"/g, "%22"); +/* harmony default export */ const util_escapeName = (escapeName); + +;// ./node_modules/form-data-encoder/lib/esm/util/isFunction.js +const isFunction_isFunction = (value) => (typeof value === "function"); +/* harmony default export */ const util_isFunction = (isFunction_isFunction); + +;// ./node_modules/form-data-encoder/lib/esm/util/isFileLike.js + +const isFileLike = (value) => Boolean(value + && typeof value === "object" + && util_isFunction(value.constructor) + && value[Symbol.toStringTag] === "File" + && util_isFunction(value.stream) + && value.name != null + && value.size != null + && value.lastModified != null); + +;// ./node_modules/form-data-encoder/lib/esm/util/isFormData.js + +const isFormData = (value) => Boolean(value + && util_isFunction(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && util_isFunction(value.append) + && util_isFunction(value.getAll) + && util_isFunction(value.entries) + && util_isFunction(value[Symbol.iterator])); +const isFormDataLike = (/* unused pure expression or super */ null && (isFormData)); + +;// ./node_modules/form-data-encoder/lib/esm/FormDataEncoder.js +var FormDataEncoder_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var FormDataEncoder_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader; + + + + + + +const FormDataEncoder_defaultOptions = { + enableAdditionalHeaders: false +}; +class FormDataEncoder { + constructor(form, boundaryOrOptions, options) { + _FormDataEncoder_instances.add(this); + _FormDataEncoder_CRLF.set(this, "\r\n"); + _FormDataEncoder_CRLF_BYTES.set(this, void 0); + _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0); + _FormDataEncoder_DASHES.set(this, "-".repeat(2)); + _FormDataEncoder_encoder.set(this, new TextEncoder()); + _FormDataEncoder_footer.set(this, void 0); + _FormDataEncoder_form.set(this, void 0); + _FormDataEncoder_options.set(this, void 0); + if (!isFormData(form)) { + throw new TypeError("Expected first argument to be a FormData instance."); + } + let boundary; + if (util_isPlainObject(boundaryOrOptions)) { + options = boundaryOrOptions; + } + else { + boundary = boundaryOrOptions; + } + if (!boundary) { + boundary = util_createBoundary(); + } + if (typeof boundary !== "string") { + throw new TypeError("Expected boundary argument to be a string."); + } + if (options && !util_isPlainObject(options)) { + throw new TypeError("Expected options argument to be an object."); + } + FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_form, form, "f"); + FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_options, { ...FormDataEncoder_defaultOptions, ...options }, "f"); + FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f"); + FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f"); + this.boundary = `form-data-boundary-${boundary}`; + this.contentType = `multipart/form-data; boundary=${this.boundary}`; + FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_footer, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f"); + this.contentLength = String(this.getContentLength()); + this.headers = Object.freeze({ + "Content-Type": this.contentType, + "Content-Length": this.contentLength + }); + Object.defineProperties(this, { + boundary: { writable: false, configurable: false }, + contentType: { writable: false, configurable: false }, + contentLength: { writable: false, configurable: false }, + headers: { writable: false, configurable: false } + }); + } + getContentLength() { + let length = 0; + for (const [name, raw] of FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { + const value = isFileLike(raw) ? raw : FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(util_normalizeValue(raw)); + length += FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength; + length += isFileLike(value) ? 
value.size : value.byteLength; + length += FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f"); + } + return length + FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength; + } + *values() { + for (const [name, raw] of FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_form, "f").entries()) { + const value = isFileLike(raw) ? raw : FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(util_normalizeValue(raw)); + yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value); + yield value; + yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f"); + } + yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_footer, "f"); + } + async *encode() { + for (const part of this.values()) { + if (isFileLike(part)) { + yield* part.stream(); + } + else { + yield part; + } + } + } + [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) { + let header = ""; + header += `${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Disposition: form-data; name="${util_escapeName(name)}"`; + if (isFileLike(value)) { + header += `; filename="${util_escapeName(value.name)}"${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Type: ${value.type || "application/octet-stream"}`; + } + if (FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true) { + header += `${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${isFileLike(value) ? value.size : value.byteLength}`; + } + return FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`); + }, Symbol.iterator)]() { + return this.values(); + } + [Symbol.asyncIterator]() { + return this.encode(); + } +} +const Encoder = (/* unused pure expression or super */ null && (FormDataEncoder)); + +;// ./node_modules/form-data-encoder/lib/esm/index.js + + + + + + +;// ./node_modules/openai/_shims/MultipartBody.mjs +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
+ */ +class MultipartBody { + constructor(body) { + this.body = body; + } + get [Symbol.toStringTag]() { + return 'MultipartBody'; + } +} +//# sourceMappingURL=MultipartBody.mjs.map +;// external "node:stream/web" +const web_namespaceObject = require("node:stream/web"); +;// ./node_modules/openai/_shims/node-runtime.mjs + + + + + + + + + +let fileFromPathWarned = false; +async function node_runtime_fileFromPath(path, ...args) { + // this import fails in environments that don't handle export maps correctly, like old versions of Jest + const { fileFromPath: _fileFromPath } = await __webpack_require__.e(/* import() */ 401).then(__webpack_require__.bind(__webpack_require__, 26401)); + if (!fileFromPathWarned) { + console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`); + fileFromPathWarned = true; + } + // @ts-ignore + return await _fileFromPath(path, ...args); +} +const defaultHttpAgent = new agentkeepalive({ keepAlive: true, timeout: 5 * 60 * 1000 }); +const defaultHttpsAgent = new agentkeepalive.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); +async function node_runtime_getMultipartRequestOptions(form, opts) { + const encoder = new FormDataEncoder(form); + const readable = external_node_stream_namespaceObject.Readable.from(encoder); + const body = new MultipartBody(readable); + const headers = { + ...opts.headers, + ...encoder.headers, + 'Content-Length': encoder.contentLength, + }; + return { ...opts, body: body, headers }; +} +function getRuntime() { + // Polyfill global object if needed. + if (typeof AbortController === 'undefined') { + // @ts-expect-error (the types are subtly different, but compatible in practice) + globalThis.AbortController = abort_controller.AbortController; + } + return { + kind: 'node', + fetch: node_fetch_lib, + Request: lib_Request, + Response: lib_Response, + Headers: lib_Headers, + FormData: FormData_FormData, + Blob: esm_Blob/* Blob */.Y, + File: esm_File/* File */.Z, + ReadableStream: web_namespaceObject.ReadableStream, + getMultipartRequestOptions: node_runtime_getMultipartRequestOptions, + getDefaultAgent: (url) => (url.startsWith('https') ? defaultHttpsAgent : defaultHttpAgent), + fileFromPath: node_runtime_fileFromPath, + isFsReadStream: (value) => value instanceof external_node_fs_namespaceObject.ReadStream, + }; +} +//# sourceMappingURL=node-runtime.mjs.map +;// ./node_modules/openai/_shims/index.mjs +/** + * Disclaimer: modules in _shims aren't intended to be imported by SDK users. + */ + + +const init = () => { + if (!kind) setShims(getRuntime(), { auto: true }); +}; + + +init(); + +;// ./node_modules/openai/internal/decoders/line.mjs +var line_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var line_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LineDecoder_carriageReturnIndex; + +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. + * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +class LineDecoder { + constructor() { + _LineDecoder_carriageReturnIndex.set(this, void 0); + this.buffer = new Uint8Array(); + line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); + } + decode(chunk) { + if (chunk == null) { + return []; + } + const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) + : typeof chunk === 'string' ? new TextEncoder().encode(chunk) + : chunk; + let newData = new Uint8Array(this.buffer.length + binaryChunk.length); + newData.set(this.buffer); + newData.set(binaryChunk, this.buffer.length); + this.buffer = newData; + const lines = []; + let patternIndex; + while ((patternIndex = findNewlineIndex(this.buffer, line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) { + if (patternIndex.carriage && line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) { + // skip until we either get a corresponding `\n`, a new `\r` or nothing + line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f"); + continue; + } + // we got double \r or \rtext\n + if (line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null && + (patternIndex.index !== line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) { + lines.push(this.decodeText(this.buffer.slice(0, line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1))); + this.buffer = this.buffer.slice(line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")); + line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); + continue; + } + const endIndex = line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding; + const line = this.decodeText(this.buffer.slice(0, endIndex)); + lines.push(line); + this.buffer = this.buffer.slice(patternIndex.index); + line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); + } + return lines; + } + decodeText(bytes) { + if (bytes == null) + return ''; + if (typeof bytes === 'string') + return bytes; + // Node: + if (typeof Buffer !== 'undefined') { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + throw new error_OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); + } + // Browser + if (typeof TextDecoder !== 'undefined') { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8')); + return this.textDecoder.decode(bytes); + } + throw new error_OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. 
Please report this error.`); + } + throw new error_OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`); + } + flush() { + if (!this.buffer.length) { + return []; + } + return this.decode('\n'); + } +} +_LineDecoder_carriageReturnIndex = new WeakMap(); +// prettier-ignore +LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']); +LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; +/** + * This function searches the buffer for the end patterns, (\r or \n) + * and returns an object with the index preceding the matched newline and the + * index after the newline char. `null` is returned if no new line is found. + * + * ```ts + * findNewLineIndex('abc\ndef') -> { preceding: 2, index: 3 } + * ``` + */ +function findNewlineIndex(buffer, startIndex) { + const newline = 0x0a; // \n + const carriage = 0x0d; // \r + for (let i = startIndex ?? 0; i < buffer.length; i++) { + if (buffer[i] === newline) { + return { preceding: i, index: i + 1, carriage: false }; + } + if (buffer[i] === carriage) { + return { preceding: i, index: i + 1, carriage: true }; + } + } + return null; +} +function findDoubleNewlineIndex(buffer) { + // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n) + // and returns the index right after the first occurrence of any pattern, + // or -1 if none of the patterns are found. + const newline = 0x0a; // \n + const carriage = 0x0d; // \r + for (let i = 0; i < buffer.length - 1; i++) { + if (buffer[i] === newline && buffer[i + 1] === newline) { + // \n\n + return i + 2; + } + if (buffer[i] === carriage && buffer[i + 1] === carriage) { + // \r\r + return i + 2; + } + if (buffer[i] === carriage && + buffer[i + 1] === newline && + i + 3 < buffer.length && + buffer[i + 2] === carriage && + buffer[i + 3] === newline) { + // \r\n\r\n + return i + 4; + } + } + return -1; +} +//# sourceMappingURL=line.mjs.map +;// ./node_modules/openai/internal/stream-utils.mjs +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". 
+ * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +function ReadableStreamToAsyncIterable(stream) { + if (stream[Symbol.asyncIterator]) + return stream; + const reader = stream.getReader(); + return { + async next() { + try { + const result = await reader.read(); + if (result?.done) + reader.releaseLock(); // release lock when stream becomes closed + return result; + } + catch (e) { + reader.releaseLock(); // release lock when stream becomes errored + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: undefined }; + }, + [Symbol.asyncIterator]() { + return this; + }, + }; +} +//# sourceMappingURL=stream-utils.mjs.map +;// ./node_modules/openai/streaming.mjs + + + + + + +class Stream { + constructor(iterator, controller) { + this.iterator = iterator; + this.controller = controller; + } + static fromSSEResponse(response, controller) { + let consumed = false; + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const sse of _iterSSEMessages(response, controller)) { + if (done) + continue; + if (sse.data.startsWith('[DONE]')) { + done = true; + continue; + } + if (sse.event === null || + sse.event.startsWith('response.') || + sse.event.startsWith('transcript.')) { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + if (data && data.error) { + throw new APIError(undefined, data.error, undefined, createResponseHeaders(response.headers)); + } + yield data; + } + else { + let data; + try { + data = JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + // TODO: Is this where the error should be thrown? + if (sse.event == 'error') { + throw new APIError(undefined, data.error, data.message, undefined); + } + yield { event: sse.event, data: data }; + } + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + /** + * Generates a Stream from a newline-separated ReadableStream + * where each item is a JSON value. + */ + static fromReadableStream(readableStream, controller) { + let consumed = false; + async function* iterLines() { + const lineDecoder = new LineDecoder(); + const iter = ReadableStreamToAsyncIterable(readableStream); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + yield line; + } + } + for (const line of lineDecoder.flush()) { + yield line; + } + } + async function* iterator() { + if (consumed) { + throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); + } + consumed = true; + let done = false; + try { + for await (const line of iterLines()) { + if (done) + continue; + if (line) + yield JSON.parse(line); + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. 
+ if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. + if (!done) + controller.abort(); + } + } + return new Stream(iterator, controller); + } + [Symbol.asyncIterator]() { + return this.iterator(); + } + /** + * Splits the stream into two streams which can be + * independently read from at different speeds. + */ + tee() { + const left = []; + const right = []; + const iterator = this.iterator(); + const teeIterator = (queue) => { + return { + next: () => { + if (queue.length === 0) { + const result = iterator.next(); + left.push(result); + right.push(result); + } + return queue.shift(); + }, + }; + }; + return [ + new Stream(() => teeIterator(left), this.controller), + new Stream(() => teeIterator(right), this.controller), + ]; + } + /** + * Converts this stream to a newline-separated ReadableStream of + * JSON stringified values in the stream + * which can be turned back into a Stream with `Stream.fromReadableStream()`. + */ + toReadableStream() { + const self = this; + let iter; + const encoder = new TextEncoder(); + return new registry_ReadableStream({ + async start() { + iter = self[Symbol.asyncIterator](); + }, + async pull(ctrl) { + try { + const { value, done } = await iter.next(); + if (done) + return ctrl.close(); + const bytes = encoder.encode(JSON.stringify(value) + '\n'); + ctrl.enqueue(bytes); + } + catch (err) { + ctrl.error(err); + } + }, + async cancel() { + await iter.return?.(); + }, + }); + } +} +async function* _iterSSEMessages(response, controller) { + if (!response.body) { + controller.abort(); + throw new error_OpenAIError(`Attempted to iterate over a response with no body`); + } + const sseDecoder = new SSEDecoder(); + const lineDecoder = new LineDecoder(); + const iter = ReadableStreamToAsyncIterable(response.body); + for await (const sseChunk of iterSSEChunks(iter)) { + for (const line of lineDecoder.decode(sseChunk)) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } + } + for (const line of lineDecoder.flush()) { + const sse = sseDecoder.decode(line); + if (sse) + yield sse; + } +} +/** + * Given an async iterable iterator, iterates over it and yields full + * SSE chunks, i.e. yields when a double new-line is encountered. + */ +async function* iterSSEChunks(iterator) { + let data = new Uint8Array(); + for await (const chunk of iterator) { + if (chunk == null) { + continue; + } + const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) + : typeof chunk === 'string' ? 
new TextEncoder().encode(chunk) + : chunk; + let newData = new Uint8Array(data.length + binaryChunk.length); + newData.set(data); + newData.set(binaryChunk, data.length); + data = newData; + let patternIndex; + while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { + yield data.slice(0, patternIndex); + data = data.slice(patternIndex); + } + } + if (data.length > 0) { + yield data; + } +} +class SSEDecoder { + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + decode(line) { + if (line.endsWith('\r')) { + line = line.substring(0, line.length - 1); + } + if (!line) { + // empty line and we didn't previously encounter any messages + if (!this.event && !this.data.length) + return null; + const sse = { + event: this.event, + data: this.data.join('\n'), + raw: this.chunks, + }; + this.event = null; + this.data = []; + this.chunks = []; + return sse; + } + this.chunks.push(line); + if (line.startsWith(':')) { + return null; + } + let [fieldname, _, value] = partition(line, ':'); + if (value.startsWith(' ')) { + value = value.substring(1); + } + if (fieldname === 'event') { + this.event = value; + } + else if (fieldname === 'data') { + this.data.push(value); + } + return null; + } +} +function partition(str, delimiter) { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + return [str, '', '']; +} +//# sourceMappingURL=streaming.mjs.map +;// ./node_modules/openai/uploads.mjs + + +const isResponseLike = (value) => value != null && + typeof value === 'object' && + typeof value.url === 'string' && + typeof value.blob === 'function'; +const uploads_isFileLike = (value) => value != null && + typeof value === 'object' && + typeof value.name === 'string' && + typeof value.lastModified === 'number' && + isBlobLike(value); +/** + * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check + * adds the arrayBuffer() method type because it is available and used at runtime + */ +const isBlobLike = (value) => value != null && + typeof value === 'object' && + typeof value.size === 'number' && + typeof value.type === 'string' && + typeof value.text === 'function' && + typeof value.slice === 'function' && + typeof value.arrayBuffer === 'function'; +const isUploadable = (value) => { + return uploads_isFileLike(value) || isResponseLike(value) || isFsReadStream(value); +}; +/** + * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats + * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s + * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible + * @param {Object=} options additional properties + * @param {string=} options.type the MIME type of the content + * @param {number=} options.lastModified the last modified timestamp + * @returns a {@link File} with the given properties + */ +async function toFile(value, name, options) { + // If it's a promise, resolve it. + value = await value; + // If we've been given a `File` we don't need to do anything + if (uploads_isFileLike(value)) { + return value; + } + if (isResponseLike(value)) { + const blob = await value.blob(); + name || (name = new URL(value.url).pathname.split(/[\\/]/).pop() ?? 
'unknown_file'); + // we need to convert the `Blob` into an array buffer because the `Blob` class + // that `node-fetch` defines is incompatible with the web standard which results + // in `new File` interpreting it as a string instead of binary data. + const data = isBlobLike(blob) ? [(await blob.arrayBuffer())] : [blob]; + return new File(data, name, options); + } + const bits = await getBytes(value); + name || (name = getName(value) ?? 'unknown_file'); + if (!options?.type) { + const type = bits[0]?.type; + if (typeof type === 'string') { + options = { ...options, type }; + } + } + return new File(bits, name, options); +} +async function getBytes(value) { + let parts = []; + if (typeof value === 'string' || + ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. + value instanceof ArrayBuffer) { + parts.push(value); + } + else if (isBlobLike(value)) { + parts.push(await value.arrayBuffer()); + } + else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc. + ) { + for await (const chunk of value) { + parts.push(chunk); // TODO, consider validating? + } + } + else { + throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor + ?.name}; props: ${propsForError(value)}`); + } + return parts; +} +function propsForError(value) { + const props = Object.getOwnPropertyNames(value); + return `[${props.map((p) => `"${p}"`).join(', ')}]`; +} +function getName(value) { + return (getStringFromMaybeBuffer(value.name) || + getStringFromMaybeBuffer(value.filename) || + // For fs.ReadStream + getStringFromMaybeBuffer(value.path)?.split(/[\\/]/).pop()); +} +const getStringFromMaybeBuffer = (x) => { + if (typeof x === 'string') + return x; + if (typeof Buffer !== 'undefined' && x instanceof Buffer) + return String(x); + return undefined; +}; +const isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; +const isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody'; +/** + * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. + * Otherwise returns the request as is. 
+ */ +const maybeMultipartFormRequestOptions = async (opts) => { + if (!hasUploadableValue(opts.body)) + return opts; + const form = await createForm(opts.body); + return getMultipartRequestOptions(form, opts); +}; +const multipartFormRequestOptions = async (opts) => { + const form = await createForm(opts.body); + return registry_getMultipartRequestOptions(form, opts); +}; +const createForm = async (body) => { + const form = new registry_FormData(); + await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); + return form; +}; +const hasUploadableValue = (value) => { + if (isUploadable(value)) + return true; + if (Array.isArray(value)) + return value.some(hasUploadableValue); + if (value && typeof value === 'object') { + for (const k in value) { + if (hasUploadableValue(value[k])) + return true; + } + } + return false; +}; +const addFormValue = async (form, key, value) => { + if (value === undefined) + return; + if (value == null) { + throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`); + } + // TODO: make nested formats configurable + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + form.append(key, String(value)); + } + else if (isUploadable(value)) { + const file = await toFile(value); + form.append(key, file); + } + else if (Array.isArray(value)) { + await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); + } + else if (typeof value === 'object') { + await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop))); + } + else { + throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`); + } +}; +//# sourceMappingURL=uploads.mjs.map +;// ./node_modules/openai/core.mjs +var core_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var core_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _AbstractPage_client; + + + + +// try running side effects outside of _shims/index to workaround https://github.com/vercel/next.js/issues/76881 +init(); + + +async function defaultParseResponse(props) { + const { response } = props; + if (props.options.stream) { + debug('response', response.status, response.url, response.headers, response.body); + // Note: there is an invariant here that isn't represented in the type system + // that if you set `stream: true` the response type must also be `Stream` + if (props.options.__streamClass) { + return props.options.__streamClass.fromSSEResponse(response, props.controller); + } + return Stream.fromSSEResponse(response, props.controller); + } + // fetch refuses to read the body when the status code is 204. + if (response.status === 204) { + return null; + } + if (props.options.__binaryResponse) { + return response; + } + const contentType = response.headers.get('content-type'); + const mediaType = contentType?.split(';')[0]?.trim(); + const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json'); + if (isJSON) { + const json = await response.json(); + debug('response', response.status, response.url, response.headers, json); + return _addRequestID(json, response); + } + const text = await response.text(); + debug('response', response.status, response.url, response.headers, text); + // TODO handle blob, arraybuffer, other content types, etc. + return text; +} +function _addRequestID(value, response) { + if (!value || typeof value !== 'object' || Array.isArray(value)) { + return value; + } + return Object.defineProperty(value, '_request_id', { + value: response.headers.get('x-request-id'), + enumerable: false, + }); +} +/** + * A subclass of `Promise` providing additional helper methods + * for interacting with the SDK. + */ +class APIPromise extends Promise { + constructor(responsePromise, parseResponse = defaultParseResponse) { + super((resolve) => { + // this is maybe a bit weird but this has to be a no-op to not implicitly + // parse the response body; instead .then, .catch, .finally are overridden + // to parse the response + resolve(null); + }); + this.responsePromise = responsePromise; + this.parseResponse = parseResponse; + } + _thenUnwrap(transform) { + return new APIPromise(this.responsePromise, async (props) => _addRequestID(transform(await this.parseResponse(props), props), props.response)); + } + /** + * Gets the raw `Response` instance instead of parsing the response + * data. + * + * If you want to parse the response body but still get the `Response` + * instance, you can use {@link withResponse()}. + * + * 👋 Getting the wrong TypeScript type for `Response`? + * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + asResponse() { + return this.responsePromise.then((p) => p.response); + } + /** + * Gets the parsed response data, the raw `Response` instance and the ID of the request, + * returned via the X-Request-ID header which is useful for debugging requests and reporting + * issues to OpenAI. + * + * If you just want to get the raw `Response` instance without parsing it, + * you can use {@link asResponse()}. + * + * + * 👋 Getting the wrong TypeScript type for `Response`? 
+ * Try setting `"moduleResolution": "NodeNext"` if you can, + * or add one of these imports before your first `import … from 'openai'`: + * - `import 'openai/shims/node'` (if you're running on Node) + * - `import 'openai/shims/web'` (otherwise) + */ + async withResponse() { + const [data, response] = await Promise.all([this.parse(), this.asResponse()]); + return { data, response, request_id: response.headers.get('x-request-id') }; + } + parse() { + if (!this.parsedPromise) { + this.parsedPromise = this.responsePromise.then(this.parseResponse); + } + return this.parsedPromise; + } + then(onfulfilled, onrejected) { + return this.parse().then(onfulfilled, onrejected); + } + catch(onrejected) { + return this.parse().catch(onrejected); + } + finally(onfinally) { + return this.parse().finally(onfinally); + } +} +class APIClient { + constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes + httpAgent, fetch: overriddenFetch, }) { + this.baseURL = baseURL; + this.maxRetries = validatePositiveInteger('maxRetries', maxRetries); + this.timeout = validatePositiveInteger('timeout', timeout); + this.httpAgent = httpAgent; + this.fetch = overriddenFetch ?? registry_fetch; + } + authHeaders(opts) { + return {}; + } + /** + * Override this to add your own default headers, for example: + * + * { + * ...super.defaultHeaders(), + * Authorization: 'Bearer 123', + * } + */ + defaultHeaders(opts) { + return { + Accept: 'application/json', + 'Content-Type': 'application/json', + 'User-Agent': this.getUserAgent(), + ...getPlatformHeaders(), + ...this.authHeaders(opts), + }; + } + /** + * Override this to add your own headers validation: + */ + validateHeaders(headers, customHeaders) { } + defaultIdempotencyKey() { + return `stainless-node-retry-${uuid4()}`; + } + get(path, opts) { + return this.methodRequest('get', path, opts); + } + post(path, opts) { + return this.methodRequest('post', path, opts); + } + patch(path, opts) { + return this.methodRequest('patch', path, opts); + } + put(path, opts) { + return this.methodRequest('put', path, opts); + } + delete(path, opts) { + return this.methodRequest('delete', path, opts); + } + methodRequest(method, path, opts) { + return this.request(Promise.resolve(opts).then(async (opts) => { + const body = opts && isBlobLike(opts?.body) ? new DataView(await opts.body.arrayBuffer()) + : opts?.body instanceof DataView ? opts.body + : opts?.body instanceof ArrayBuffer ? new DataView(opts.body) + : opts && ArrayBuffer.isView(opts?.body) ? new DataView(opts.body.buffer) + : opts?.body; + return { method, path, ...opts, body }; + })); + } + getAPIList(path, Page, opts) { + return this.requestAPIList(Page, { method: 'get', path, ...opts }); + } + calculateContentLength(body) { + if (typeof body === 'string') { + if (typeof Buffer !== 'undefined') { + return Buffer.byteLength(body, 'utf8').toString(); + } + if (typeof TextEncoder !== 'undefined') { + const encoder = new TextEncoder(); + const encoded = encoder.encode(body); + return encoded.length.toString(); + } + } + else if (ArrayBuffer.isView(body)) { + return body.byteLength.toString(); + } + return null; + } + buildRequest(options, { retryCount = 0 } = {}) { + options = { ...options }; + const { method, path, query, headers: headers = {} } = options; + const body = ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? + options.body + : isMultipartBody(options.body) ? options.body.body + : options.body ? 
JSON.stringify(options.body, null, 2) + : null; + const contentLength = this.calculateContentLength(body); + const url = this.buildURL(path, query); + if ('timeout' in options) + validatePositiveInteger('timeout', options.timeout); + options.timeout = options.timeout ?? this.timeout; + const httpAgent = options.httpAgent ?? this.httpAgent ?? getDefaultAgent(url); + const minAgentTimeout = options.timeout + 1000; + if (typeof httpAgent?.options?.timeout === 'number' && + minAgentTimeout > (httpAgent.options.timeout ?? 0)) { + // Allow any given request to bump our agent active socket timeout. + // This may seem strange, but leaking active sockets should be rare and not particularly problematic, + // and without mutating agent we would need to create more of them. + // This tradeoff optimizes for performance. + httpAgent.options.timeout = minAgentTimeout; + } + if (this.idempotencyHeader && method !== 'get') { + if (!options.idempotencyKey) + options.idempotencyKey = this.defaultIdempotencyKey(); + headers[this.idempotencyHeader] = options.idempotencyKey; + } + const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); + const req = { + method, + ...(body && { body: body }), + headers: reqHeaders, + ...(httpAgent && { agent: httpAgent }), + // @ts-ignore node-fetch uses a custom AbortSignal type that is + // not compatible with standard web types + signal: options.signal ?? null, + }; + return { req, url, timeout: options.timeout }; + } + buildHeaders({ options, headers, contentLength, retryCount, }) { + const reqHeaders = {}; + if (contentLength) { + reqHeaders['content-length'] = contentLength; + } + const defaultHeaders = this.defaultHeaders(options); + applyHeadersMut(reqHeaders, defaultHeaders); + applyHeadersMut(reqHeaders, headers); + // let builtin fetch set the Content-Type for multipart bodies + if (isMultipartBody(options.body) && kind !== 'node') { + delete reqHeaders['content-type']; + } + // Don't set theses headers if they were already set or removed through default headers or by the caller. + // We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to account + // for the removal case. + if (getHeader(defaultHeaders, 'x-stainless-retry-count') === undefined && + getHeader(headers, 'x-stainless-retry-count') === undefined) { + reqHeaders['x-stainless-retry-count'] = String(retryCount); + } + if (getHeader(defaultHeaders, 'x-stainless-timeout') === undefined && + getHeader(headers, 'x-stainless-timeout') === undefined && + options.timeout) { + reqHeaders['x-stainless-timeout'] = String(options.timeout); + } + this.validateHeaders(reqHeaders, headers); + return reqHeaders; + } + /** + * Used as a callback for mutating the given `FinalRequestOptions` object. + */ + async prepareOptions(options) { } + /** + * Used as a callback for mutating the given `RequestInit` object. + * + * This is useful for cases where you want to add certain headers based off of + * the request properties, e.g. `method` or `url`. + */ + async prepareRequest(request, { url, options }) { } + parseHeaders(headers) { + return (!headers ? {} + : Symbol.iterator in headers ? 
+ Object.fromEntries(Array.from(headers).map((header) => [...header])) + : { ...headers }); + } + makeStatusError(status, error, message, headers) { + return APIError.generate(status, error, message, headers); + } + request(options, remainingRetries = null) { + return new APIPromise(this.makeRequest(options, remainingRetries)); + } + async makeRequest(optionsInput, retriesRemaining) { + const options = await optionsInput; + const maxRetries = options.maxRetries ?? this.maxRetries; + if (retriesRemaining == null) { + retriesRemaining = maxRetries; + } + await this.prepareOptions(options); + const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); + await this.prepareRequest(req, { url, options }); + debug('request', url, options, req.headers); + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + const controller = new AbortController(); + const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError); + if (response instanceof Error) { + if (options.signal?.aborted) { + throw new APIUserAbortError(); + } + if (retriesRemaining) { + return this.retryRequest(options, retriesRemaining); + } + if (response.name === 'AbortError') { + throw new APIConnectionTimeoutError(); + } + throw new APIConnectionError({ cause: response }); + } + const responseHeaders = createResponseHeaders(response.headers); + if (!response.ok) { + if (retriesRemaining && this.shouldRetry(response)) { + const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders); + return this.retryRequest(options, retriesRemaining, responseHeaders); + } + const errText = await response.text().catch((e) => castToError(e).message); + const errJSON = safeJSON(errText); + const errMessage = errJSON ? undefined : errText; + const retryMessage = retriesRemaining ? `(error; no more retries left)` : `(error; not retryable)`; + debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders, errMessage); + const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders); + throw err; + } + return { response, options, controller }; + } + requestAPIList(Page, options) { + const request = this.makeRequest(options, null); + return new PagePromise(this, request, Page); + } + buildURL(path, query) { + const url = isAbsoluteURL(path) ? + new URL(path) + : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); + const defaultQuery = this.defaultQuery(); + if (!isEmptyObj(defaultQuery)) { + query = { ...defaultQuery, ...query }; + } + if (typeof query === 'object' && query && !Array.isArray(query)) { + url.search = this.stringifyQuery(query); + } + return url.toString(); + } + stringifyQuery(query) { + return Object.entries(query) + .filter(([_, value]) => typeof value !== 'undefined') + .map(([key, value]) => { + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; + } + if (value === null) { + return `${encodeURIComponent(key)}=`; + } + throw new error_OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`); + }) + .join('&'); + } + async fetchWithTimeout(url, init, ms, controller) { + const { signal, ...options } = init || {}; + if (signal) + signal.addEventListener('abort', () => controller.abort()); + const timeout = setTimeout(() => controller.abort(), ms); + const fetchOptions = { + signal: controller.signal, + ...options, + }; + if (fetchOptions.method) { + // Custom methods like 'patch' need to be uppercased + // See https://github.com/nodejs/undici/issues/2294 + fetchOptions.method = fetchOptions.method.toUpperCase(); + } + return ( + // use undefined this binding; fetch errors if bound to something else in browser/cloudflare + this.fetch.call(undefined, url, fetchOptions).finally(() => { + clearTimeout(timeout); + })); + } + shouldRetry(response) { + // Note this is not a standard header. + const shouldRetryHeader = response.headers.get('x-should-retry'); + // If the server explicitly says whether or not to retry, obey. + if (shouldRetryHeader === 'true') + return true; + if (shouldRetryHeader === 'false') + return false; + // Retry on request timeouts. + if (response.status === 408) + return true; + // Retry on lock timeouts. + if (response.status === 409) + return true; + // Retry on rate limits. + if (response.status === 429) + return true; + // Retry internal errors. + if (response.status >= 500) + return true; + return false; + } + async retryRequest(options, retriesRemaining, responseHeaders) { + let timeoutMillis; + // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. + const retryAfterMillisHeader = responseHeaders?.['retry-after-ms']; + if (retryAfterMillisHeader) { + const timeoutMs = parseFloat(retryAfterMillisHeader); + if (!Number.isNaN(timeoutMs)) { + timeoutMillis = timeoutMs; + } + } + // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + const retryAfterHeader = responseHeaders?.['retry-after']; + if (retryAfterHeader && !timeoutMillis) { + const timeoutSeconds = parseFloat(retryAfterHeader); + if (!Number.isNaN(timeoutSeconds)) { + timeoutMillis = timeoutSeconds * 1000; + } + else { + timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); + } + } + // If the API asks us to wait a certain amount of time (and it's a reasonable amount), + // just do what it says, but otherwise calculate a default + if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { + const maxRetries = options.maxRetries ?? this.maxRetries; + timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); + } + await sleep(timeoutMillis); + return this.makeRequest(options, retriesRemaining - 1); + } + calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) { + const initialRetryDelay = 0.5; + const maxRetryDelay = 8.0; + const numRetries = maxRetries - retriesRemaining; + // Apply exponential backoff, but not more than the max. + const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); + // Apply some jitter, take up to at most 25 percent of the retry time. 
+ const jitter = 1 - Math.random() * 0.25; + return sleepSeconds * jitter * 1000; + } + getUserAgent() { + return `${this.constructor.name}/JS ${VERSION}`; + } +} +class AbstractPage { + constructor(client, response, body, options) { + _AbstractPage_client.set(this, void 0); + core_classPrivateFieldSet(this, _AbstractPage_client, client, "f"); + this.options = options; + this.response = response; + this.body = body; + } + hasNextPage() { + const items = this.getPaginatedItems(); + if (!items.length) + return false; + return this.nextPageInfo() != null; + } + async getNextPage() { + const nextInfo = this.nextPageInfo(); + if (!nextInfo) { + throw new error_OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.'); + } + const nextOptions = { ...this.options }; + if ('params' in nextInfo && typeof nextOptions.query === 'object') { + nextOptions.query = { ...nextOptions.query, ...nextInfo.params }; + } + else if ('url' in nextInfo) { + const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()]; + for (const [key, value] of params) { + nextInfo.url.searchParams.set(key, value); + } + nextOptions.query = undefined; + nextOptions.path = nextInfo.url.toString(); + } + return await core_classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions); + } + async *iterPages() { + // eslint-disable-next-line @typescript-eslint/no-this-alias + let page = this; + yield page; + while (page.hasNextPage()) { + page = await page.getNextPage(); + yield page; + } + } + async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() { + for await (const page of this.iterPages()) { + for (const item of page.getPaginatedItems()) { + yield item; + } + } + } +} +/** + * This subclass of Promise will resolve to an instantiated Page once the request completes. + * + * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ +class PagePromise extends APIPromise { + constructor(client, request, Page) { + super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options)); + } + /** + * Allow auto-paginating iteration on an unawaited list call, eg: + * + * for await (const item of client.items.list()) { + * console.log(item) + * } + */ + async *[Symbol.asyncIterator]() { + const page = await this; + for await (const item of page) { + yield item; + } + } +} +const createResponseHeaders = (headers) => { + return new Proxy(Object.fromEntries( + // @ts-ignore + headers.entries()), { + get(target, name) { + const key = name.toString(); + return target[key.toLowerCase()] || target[key]; + }, + }); +}; +// This is required so that we can determine if a given object matches the RequestOptions +// type at runtime. While this requires duplication, it is enforced by the TypeScript +// compiler such that any missing / extraneous keys will cause an error. 
+const requestOptionsKeys = { + method: true, + path: true, + query: true, + body: true, + headers: true, + maxRetries: true, + stream: true, + timeout: true, + httpAgent: true, + signal: true, + idempotencyKey: true, + __metadata: true, + __binaryRequest: true, + __binaryResponse: true, + __streamClass: true, +}; +const isRequestOptions = (obj) => { + return (typeof obj === 'object' && + obj !== null && + !isEmptyObj(obj) && + Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k))); +}; +const getPlatformProperties = () => { + if (typeof Deno !== 'undefined' && Deno.build != null) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(Deno.build.os), + 'X-Stainless-Arch': normalizeArch(Deno.build.arch), + 'X-Stainless-Runtime': 'deno', + 'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', + }; + } + if (typeof EdgeRuntime !== 'undefined') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': `other:${EdgeRuntime}`, + 'X-Stainless-Runtime': 'edge', + 'X-Stainless-Runtime-Version': process.version, + }; + } + // Check if Node.js + if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': normalizePlatform(process.platform), + 'X-Stainless-Arch': normalizeArch(process.arch), + 'X-Stainless-Runtime': 'node', + 'X-Stainless-Runtime-Version': process.version, + }; + } + const browserInfo = getBrowserInfo(); + if (browserInfo) { + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, + 'X-Stainless-Runtime-Version': browserInfo.version, + }; + } + // TODO add support for Cloudflare workers, etc. + return { + 'X-Stainless-Lang': 'js', + 'X-Stainless-Package-Version': VERSION, + 'X-Stainless-OS': 'Unknown', + 'X-Stainless-Arch': 'unknown', + 'X-Stainless-Runtime': 'unknown', + 'X-Stainless-Runtime-Version': 'unknown', + }; +}; +// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts +function getBrowserInfo() { + if (typeof navigator === 'undefined' || !navigator) { + return null; + } + // NOTE: The order matters here! 
+ const browserPatterns = [ + { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, + { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, + ]; + // Find the FIRST matching browser + for (const { key, pattern } of browserPatterns) { + const match = pattern.exec(navigator.userAgent); + if (match) { + const major = match[1] || 0; + const minor = match[2] || 0; + const patch = match[3] || 0; + return { browser: key, version: `${major}.${minor}.${patch}` }; + } + } + return null; +} +const normalizeArch = (arch) => { + // Node docs: + // - https://nodejs.org/api/process.html#processarch + // Deno docs: + // - https://doc.deno.land/deno/stable/~/Deno.build + if (arch === 'x32') + return 'x32'; + if (arch === 'x86_64' || arch === 'x64') + return 'x64'; + if (arch === 'arm') + return 'arm'; + if (arch === 'aarch64' || arch === 'arm64') + return 'arm64'; + if (arch) + return `other:${arch}`; + return 'unknown'; +}; +const normalizePlatform = (platform) => { + // Node platforms: + // - https://nodejs.org/api/process.html#processplatform + // Deno platforms: + // - https://doc.deno.land/deno/stable/~/Deno.build + // - https://github.com/denoland/deno/issues/14799 + platform = platform.toLowerCase(); + // NOTE: this iOS check is untested and may not work + // Node does not work natively on IOS, there is a fork at + // https://github.com/nodejs-mobile/nodejs-mobile + // however it is unknown at the time of writing how to detect if it is running + if (platform.includes('ios')) + return 'iOS'; + if (platform === 'android') + return 'Android'; + if (platform === 'darwin') + return 'MacOS'; + if (platform === 'win32') + return 'Windows'; + if (platform === 'freebsd') + return 'FreeBSD'; + if (platform === 'openbsd') + return 'OpenBSD'; + if (platform === 'linux') + return 'Linux'; + if (platform) + return `Other:${platform}`; + return 'Unknown'; +}; +let _platformHeaders; +const getPlatformHeaders = () => { + return (_platformHeaders ?? (_platformHeaders = getPlatformProperties())); +}; +const safeJSON = (text) => { + try { + return JSON.parse(text); + } + catch (err) { + return undefined; + } +}; +// https://url.spec.whatwg.org/#url-scheme-string +const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; +const isAbsoluteURL = (url) => { + return startsWithSchemeRegexp.test(url); +}; +const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); +const validatePositiveInteger = (name, n) => { + if (typeof n !== 'number' || !Number.isInteger(n)) { + throw new error_OpenAIError(`${name} must be an integer`); + } + if (n < 0) { + throw new error_OpenAIError(`${name} must be a positive integer`); + } + return n; +}; +const castToError = (err) => { + if (err instanceof Error) + return err; + if (typeof err === 'object' && err !== null) { + try { + return new Error(JSON.stringify(err)); + } + catch { } + } + return new Error(err); +}; +const ensurePresent = (value) => { + if (value == null) + throw new OpenAIError(`Expected a value to be given but received ${value} instead.`); + return value; +}; +/** + * Read an environment variable. + * + * Trims beginning and trailing whitespace. 
+ * + * Will return undefined if the environment variable doesn't exist or cannot be accessed. + */ +const readEnv = (env) => { + if (typeof process !== 'undefined') { + return process.env?.[env]?.trim() ?? undefined; + } + if (typeof Deno !== 'undefined') { + return Deno.env?.get?.(env)?.trim(); + } + return undefined; +}; +const coerceInteger = (value) => { + if (typeof value === 'number') + return Math.round(value); + if (typeof value === 'string') + return parseInt(value, 10); + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +const coerceFloat = (value) => { + if (typeof value === 'number') + return value; + if (typeof value === 'string') + return parseFloat(value); + throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); +}; +const coerceBoolean = (value) => { + if (typeof value === 'boolean') + return value; + if (typeof value === 'string') + return value === 'true'; + return Boolean(value); +}; +const maybeCoerceInteger = (value) => { + if (value === undefined) { + return undefined; + } + return coerceInteger(value); +}; +const maybeCoerceFloat = (value) => { + if (value === undefined) { + return undefined; + } + return coerceFloat(value); +}; +const maybeCoerceBoolean = (value) => { + if (value === undefined) { + return undefined; + } + return coerceBoolean(value); +}; +// https://stackoverflow.com/a/34491287 +function isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +// https://eslint.org/docs/latest/rules/no-prototype-builtins +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} +/** + * Copies headers from "newHeaders" onto "targetHeaders", + * using lower-case for all properties, + * ignoring any keys with undefined values, + * and deleting any keys with null values. + */ +function applyHeadersMut(targetHeaders, newHeaders) { + for (const k in newHeaders) { + if (!hasOwn(newHeaders, k)) + continue; + const lowerKey = k.toLowerCase(); + if (!lowerKey) + continue; + const val = newHeaders[k]; + if (val === null) { + delete targetHeaders[lowerKey]; + } + else if (val !== undefined) { + targetHeaders[lowerKey] = val; + } + } +} +const SENSITIVE_HEADERS = new Set(['authorization', 'api-key']); +function debug(action, ...args) { + if (typeof process !== 'undefined' && process?.env?.['DEBUG'] === 'true') { + const modifiedArgs = args.map((arg) => { + if (!arg) { + return arg; + } + // Check for sensitive headers in request body 'headers' object + if (arg['headers']) { + // clone so we don't mutate + const modifiedArg = { ...arg, headers: { ...arg['headers'] } }; + for (const header in arg['headers']) { + if (SENSITIVE_HEADERS.has(header.toLowerCase())) { + modifiedArg['headers'][header] = 'REDACTED'; + } + } + return modifiedArg; + } + let modifiedArg = null; + // Check for sensitive headers in headers object + for (const header in arg) { + if (SENSITIVE_HEADERS.has(header.toLowerCase())) { + // avoid making a copy until we need to + modifiedArg ?? (modifiedArg = { ...arg }); + modifiedArg[header] = 'REDACTED'; + } + } + return modifiedArg ?? arg; + }); + console.log(`OpenAI:DEBUG:${action}`, ...modifiedArgs); + } +} +/** + * https://stackoverflow.com/a/2117523 + */ +const uuid4 = () => { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? 
r : (r & 0x3) | 0x8; + return v.toString(16); + }); +}; +const isRunningInBrowser = () => { + return ( + // @ts-ignore + typeof window !== 'undefined' && + // @ts-ignore + typeof window.document !== 'undefined' && + // @ts-ignore + typeof navigator !== 'undefined'); +}; +const isHeadersProtocol = (headers) => { + return typeof headers?.get === 'function'; +}; +const getRequiredHeader = (headers, header) => { + const foundHeader = getHeader(headers, header); + if (foundHeader === undefined) { + throw new Error(`Could not find ${header} header`); + } + return foundHeader; +}; +const getHeader = (headers, header) => { + const lowerCasedHeader = header.toLowerCase(); + if (isHeadersProtocol(headers)) { + // to deal with the case where the header looks like Stainless-Event-Id + const intercapsHeader = header[0]?.toUpperCase() + + header.substring(1).replace(/([^\w])(\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase()); + for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) { + const value = headers.get(key); + if (value) { + return value; + } + } + } + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() === lowerCasedHeader) { + if (Array.isArray(value)) { + if (value.length <= 1) + return value[0]; + console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`); + return value[0]; + } + return value; + } + } + return undefined; +}; +/** + * Encodes a string to Base64 format. + */ +const toBase64 = (str) => { + if (!str) + return ''; + if (typeof Buffer !== 'undefined') { + return Buffer.from(str).toString('base64'); + } + if (typeof btoa !== 'undefined') { + return btoa(str); + } + throw new OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined'); +}; +/** + * Converts a Base64 encoded string to a Float32Array. + * @param base64Str - The Base64 encoded string. + * @returns An Array of numbers interpreted as Float32 values. + */ +const toFloat32Array = (base64Str) => { + if (typeof Buffer !== 'undefined') { + // for Node.js environment + return Array.from(new Float32Array(Buffer.from(base64Str, 'base64').buffer)); + } + else { + // for legacy web platform APIs + const binaryStr = atob(base64Str); + const len = binaryStr.length; + const bytes = new Uint8Array(len); + for (let i = 0; i < len; i++) { + bytes[i] = binaryStr.charCodeAt(i); + } + return Array.from(new Float32Array(bytes.buffer)); + } +}; +function isObj(obj) { + return obj != null && typeof obj === 'object' && !Array.isArray(obj); +} +//# sourceMappingURL=core.mjs.map +;// ./node_modules/openai/error.mjs +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +class error_OpenAIError extends Error { +} +class APIError extends error_OpenAIError { + constructor(status, error, message, headers) { + super(`${APIError.makeMessage(status, error, message)}`); + this.status = status; + this.headers = headers; + this.request_id = headers?.['x-request-id']; + this.error = error; + const data = error; + this.code = data?.['code']; + this.param = data?.['param']; + this.type = data?.['type']; + } + static makeMessage(status, error, message) { + const msg = error?.message ? + typeof error.message === 'string' ? + error.message + : JSON.stringify(error.message) + : error ? 
JSON.stringify(error) + : message; + if (status && msg) { + return `${status} ${msg}`; + } + if (status) { + return `${status} status code (no body)`; + } + if (msg) { + return msg; + } + return '(no status code or body)'; + } + static generate(status, errorResponse, message, headers) { + if (!status || !headers) { + return new APIConnectionError({ message, cause: castToError(errorResponse) }); + } + const error = errorResponse?.['error']; + if (status === 400) { + return new BadRequestError(status, error, message, headers); + } + if (status === 401) { + return new AuthenticationError(status, error, message, headers); + } + if (status === 403) { + return new PermissionDeniedError(status, error, message, headers); + } + if (status === 404) { + return new NotFoundError(status, error, message, headers); + } + if (status === 409) { + return new ConflictError(status, error, message, headers); + } + if (status === 422) { + return new UnprocessableEntityError(status, error, message, headers); + } + if (status === 429) { + return new RateLimitError(status, error, message, headers); + } + if (status >= 500) { + return new InternalServerError(status, error, message, headers); + } + return new APIError(status, error, message, headers); + } +} +class APIUserAbortError extends APIError { + constructor({ message } = {}) { + super(undefined, undefined, message || 'Request was aborted.', undefined); + } +} +class APIConnectionError extends APIError { + constructor({ message, cause }) { + super(undefined, undefined, message || 'Connection error.', undefined); + // in some environments the 'cause' property is already declared + // @ts-ignore + if (cause) + this.cause = cause; + } +} +class APIConnectionTimeoutError extends APIConnectionError { + constructor({ message } = {}) { + super({ message: message ?? 'Request timed out.' 
}); + } +} +class BadRequestError extends APIError { +} +class AuthenticationError extends APIError { +} +class PermissionDeniedError extends APIError { +} +class NotFoundError extends APIError { +} +class ConflictError extends APIError { +} +class UnprocessableEntityError extends APIError { +} +class RateLimitError extends APIError { +} +class InternalServerError extends APIError { +} +class LengthFinishReasonError extends error_OpenAIError { + constructor() { + super(`Could not parse response content as the length limit was reached`); + } +} +class ContentFilterFinishReasonError extends error_OpenAIError { + constructor() { + super(`Could not parse response content as the request was rejected by the content filter`); + } +} +//# sourceMappingURL=error.mjs.map +;// ./node_modules/openai/lib/parser.mjs + +function makeParseableResponseFormat(response_format, parser) { + const obj = { ...response_format }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-response-format', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + }); + return obj; +} +function parser_makeParseableTextFormat(response_format, parser) { + const obj = { ...response_format }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-response-format', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + }); + return obj; +} +function isAutoParsableResponseFormat(response_format) { + return response_format?.['$brand'] === 'auto-parseable-response-format'; +} +function parser_makeParseableTool(tool, { parser, callback, }) { + const obj = { ...tool }; + Object.defineProperties(obj, { + $brand: { + value: 'auto-parseable-tool', + enumerable: false, + }, + $parseRaw: { + value: parser, + enumerable: false, + }, + $callback: { + value: callback, + enumerable: false, + }, + }); + return obj; +} +function isAutoParsableTool(tool) { + return tool?.['$brand'] === 'auto-parseable-tool'; +} +function maybeParseChatCompletion(completion, params) { + if (!params || !hasAutoParseableInput(params)) { + return { + ...completion, + choices: completion.choices.map((choice) => ({ + ...choice, + message: { + ...choice.message, + parsed: null, + ...(choice.message.tool_calls ? + { + tool_calls: choice.message.tool_calls, + } + : undefined), + }, + })), + }; + } + return parseChatCompletion(completion, params); +} +function parseChatCompletion(completion, params) { + const choices = completion.choices.map((choice) => { + if (choice.finish_reason === 'length') { + throw new LengthFinishReasonError(); + } + if (choice.finish_reason === 'content_filter') { + throw new ContentFilterFinishReasonError(); + } + return { + ...choice, + message: { + ...choice.message, + ...(choice.message.tool_calls ? + { + tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? undefined, + } + : undefined), + parsed: choice.message.content && !choice.message.refusal ? 
+ parseResponseFormat(params, choice.message.content) + : null, + }, + }; + }); + return { ...completion, choices }; +} +function parseResponseFormat(params, content) { + if (params.response_format?.type !== 'json_schema') { + return null; + } + if (params.response_format?.type === 'json_schema') { + if ('$parseRaw' in params.response_format) { + const response_format = params.response_format; + return response_format.$parseRaw(content); + } + return JSON.parse(content); + } + return null; +} +function parseToolCall(params, toolCall) { + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return { + ...toolCall, + function: { + ...toolCall.function, + parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments) + : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments) + : null, + }, + }; +} +function shouldParseToolCall(params, toolCall) { + if (!params) { + return false; + } + const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); + return isAutoParsableTool(inputTool) || inputTool?.function.strict || false; +} +function hasAutoParseableInput(params) { + if (isAutoParsableResponseFormat(params.response_format)) { + return true; + } + return (params.tools?.some((t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true)) ?? false); +} +function validateInputTools(tools) { + for (const tool of tools ?? []) { + if (tool.type !== 'function') { + throw new error_OpenAIError(`Currently only \`function\` tool types support auto-parsing; Received \`${tool.type}\``); + } + if (tool.function.strict !== true) { + throw new error_OpenAIError(`The \`${tool.function.name}\` tool is not marked with \`strict: true\`. 
Only strict function tools can be auto-parsed`); + } + } +} +//# sourceMappingURL=parser.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/any.mjs +function any_parseAnyDef() { + return {}; +} +//# sourceMappingURL=any.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/errorMessages.mjs +function errorMessages_addErrorMessage(res, key, errorMessage, refs) { + if (!refs?.errorMessages) + return; + if (errorMessage) { + res.errorMessage = { + ...res.errorMessage, + [key]: errorMessage, + }; + } +} +function errorMessages_setResponseValueAndErrors(res, key, value, errorMessage, refs) { + res[key] = value; + errorMessages_addErrorMessage(res, key, errorMessage, refs); +} +//# sourceMappingURL=errorMessages.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/array.mjs + + + +function array_parseArrayDef(def, refs) { + const res = { + type: 'array', + }; + if (def.type?._def?.typeName !== lib_ZodFirstPartyTypeKind.ZodAny) { + res.items = parseDef_parseDef(def.type._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + } + if (def.minLength) { + errorMessages_setResponseValueAndErrors(res, 'minItems', def.minLength.value, def.minLength.message, refs); + } + if (def.maxLength) { + errorMessages_setResponseValueAndErrors(res, 'maxItems', def.maxLength.value, def.maxLength.message, refs); + } + if (def.exactLength) { + errorMessages_setResponseValueAndErrors(res, 'minItems', def.exactLength.value, def.exactLength.message, refs); + errorMessages_setResponseValueAndErrors(res, 'maxItems', def.exactLength.value, def.exactLength.message, refs); + } + return res; +} +//# sourceMappingURL=array.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/bigint.mjs + +function bigint_parseBigintDef(def, refs) { + const res = { + type: 'integer', + format: 'int64', + }; + if (!def.checks) + return res; + for (const check of def.checks) { + switch (check.kind) { + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + errorMessages_setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + else { + errorMessages_setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + errorMessages_setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + errorMessages_setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + else { + errorMessages_setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + errorMessages_setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + errorMessages_setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +//# sourceMappingURL=bigint.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/boolean.mjs +function boolean_parseBooleanDef() { + return { + type: 'boolean', + }; +} +//# sourceMappingURL=boolean.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/branded.mjs + +function branded_parseBrandedDef(_def, refs) { + return parseDef_parseDef(_def.type._def, refs); +} +//# sourceMappingURL=branded.mjs.map +;// 
./node_modules/openai/_vendor/zod-to-json-schema/parsers/catch.mjs + +const catch_parseCatchDef = (def, refs) => { + return parseDef_parseDef(def.innerType._def, refs); +}; +//# sourceMappingURL=catch.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/date.mjs + +function date_parseDateDef(def, refs, overrideDateStrategy) { + const strategy = overrideDateStrategy ?? refs.dateStrategy; + if (Array.isArray(strategy)) { + return { + anyOf: strategy.map((item, i) => date_parseDateDef(def, refs, item)), + }; + } + switch (strategy) { + case 'string': + case 'format:date-time': + return { + type: 'string', + format: 'date-time', + }; + case 'format:date': + return { + type: 'string', + format: 'date', + }; + case 'integer': + return date_integerDateParser(def, refs); + } +} +const date_integerDateParser = (def, refs) => { + const res = { + type: 'integer', + format: 'unix-time', + }; + if (refs.target === 'openApi3') { + return res; + } + for (const check of def.checks) { + switch (check.kind) { + case 'min': + errorMessages_setResponseValueAndErrors(res, 'minimum', check.value, // This is in milliseconds + check.message, refs); + break; + case 'max': + errorMessages_setResponseValueAndErrors(res, 'maximum', check.value, // This is in milliseconds + check.message, refs); + break; + } + } + return res; +}; +//# sourceMappingURL=date.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/default.mjs + +function default_parseDefaultDef(_def, refs) { + return { + ...parseDef_parseDef(_def.innerType._def, refs), + default: _def.defaultValue(), + }; +} +//# sourceMappingURL=default.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/effects.mjs + +function effects_parseEffectsDef(_def, refs, forceResolution) { + return refs.effectStrategy === 'input' ? parseDef_parseDef(_def.schema._def, refs, forceResolution) : {}; +} +//# sourceMappingURL=effects.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/enum.mjs +function enum_parseEnumDef(def) { + return { + type: 'string', + enum: [...def.values], + }; +} +//# sourceMappingURL=enum.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/intersection.mjs + +const intersection_isJsonSchema7AllOfType = (type) => { + if ('type' in type && type.type === 'string') + return false; + return 'allOf' in type; +}; +function intersection_parseIntersectionDef(def, refs) { + const allOf = [ + parseDef_parseDef(def.left._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }), + parseDef_parseDef(def.right._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '1'], + }), + ].filter((x) => !!x); + let unevaluatedProperties = refs.target === 'jsonSchema2019-09' ? 
{ unevaluatedProperties: false } : undefined; + const mergedAllOf = []; + // If either of the schemas is an allOf, merge them into a single allOf + allOf.forEach((schema) => { + if (intersection_isJsonSchema7AllOfType(schema)) { + mergedAllOf.push(...schema.allOf); + if (schema.unevaluatedProperties === undefined) { + // If one of the schemas has no unevaluatedProperties set, + // the merged schema should also have no unevaluatedProperties set + unevaluatedProperties = undefined; + } + } + else { + let nestedSchema = schema; + if ('additionalProperties' in schema && schema.additionalProperties === false) { + const { additionalProperties, ...rest } = schema; + nestedSchema = rest; + } + else { + // As soon as one of the schemas has additionalProperties set not to false, we allow unevaluatedProperties + unevaluatedProperties = undefined; + } + mergedAllOf.push(nestedSchema); + } + }); + return mergedAllOf.length ? + { + allOf: mergedAllOf, + ...unevaluatedProperties, + } + : undefined; +} +//# sourceMappingURL=intersection.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/literal.mjs +function literal_parseLiteralDef(def, refs) { + const parsedType = typeof def.value; + if (parsedType !== 'bigint' && + parsedType !== 'number' && + parsedType !== 'boolean' && + parsedType !== 'string') { + return { + type: Array.isArray(def.value) ? 'array' : 'object', + }; + } + if (refs.target === 'openApi3') { + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + enum: [def.value], + }; + } + return { + type: parsedType === 'bigint' ? 'integer' : parsedType, + const: def.value, + }; +} +//# sourceMappingURL=literal.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/string.mjs + +let parsers_string_emojiRegex; +/** + * Generated from the regular expressions found here as of 2024-05-22: + * https://github.com/colinhacks/zod/blob/master/src/types.ts. + * + * Expressions with /i flag have been changed accordingly. + */ +const string_zodPatterns = { + /** + * `c` was changed to `[cC]` to replicate /i flag + */ + cuid: /^[cC][^\s-]{8,}$/, + cuid2: /^[0-9a-z]+$/, + ulid: /^[0-9A-HJKMNP-TV-Z]{26}$/, + /** + * `a-z` was added to replicate /i flag + */ + email: /^(?!\.)(?!.*\.\.)([a-zA-Z0-9_'+\-\.]*)[a-zA-Z0-9_+-]@([a-zA-Z0-9][a-zA-Z0-9\-]*\.)+[a-zA-Z]{2,}$/, + /** + * Constructed a valid Unicode RegExp + * + * Lazily instantiate since this type of regex isn't supported + * in all envs (e.g. React Native). 
+ * + * See: + * https://github.com/colinhacks/zod/issues/2433 + * Fix in Zod: + * https://github.com/colinhacks/zod/commit/9340fd51e48576a75adc919bff65dbc4a5d4c99b + */ + emoji: () => { + if (parsers_string_emojiRegex === undefined) { + parsers_string_emojiRegex = RegExp('^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$', 'u'); + } + return parsers_string_emojiRegex; + }, + /** + * Unused + */ + uuid: /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/, + /** + * Unused + */ + ipv4: /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/, + /** + * Unused + */ + ipv6: /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/, + base64: /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/, + nanoid: /^[a-zA-Z0-9_-]{21}$/, +}; +function string_parseStringDef(def, refs) { + const res = { + type: 'string', + }; + function processPattern(value) { + return refs.patternStrategy === 'escape' ? string_escapeNonAlphaNumeric(value) : value; + } + if (def.checks) { + for (const check of def.checks) { + switch (check.kind) { + case 'min': + errorMessages_setResponseValueAndErrors(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + break; + case 'max': + errorMessages_setResponseValueAndErrors(res, 'maxLength', typeof res.maxLength === 'number' ? Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'email': + switch (refs.emailStrategy) { + case 'format:email': + string_addFormat(res, 'email', check.message, refs); + break; + case 'format:idn-email': + string_addFormat(res, 'idn-email', check.message, refs); + break; + case 'pattern:zod': + string_addPattern(res, string_zodPatterns.email, check.message, refs); + break; + } + break; + case 'url': + string_addFormat(res, 'uri', check.message, refs); + break; + case 'uuid': + string_addFormat(res, 'uuid', check.message, refs); + break; + case 'regex': + string_addPattern(res, check.regex, check.message, refs); + break; + case 'cuid': + string_addPattern(res, string_zodPatterns.cuid, check.message, refs); + break; + case 'cuid2': + string_addPattern(res, string_zodPatterns.cuid2, check.message, refs); + break; + case 'startsWith': + string_addPattern(res, RegExp(`^${processPattern(check.value)}`), check.message, refs); + break; + case 'endsWith': + string_addPattern(res, RegExp(`${processPattern(check.value)}$`), check.message, refs); + break; + case 'datetime': + string_addFormat(res, 'date-time', check.message, refs); + break; + case 'date': + string_addFormat(res, 'date', check.message, refs); + break; + case 'time': + string_addFormat(res, 'time', check.message, refs); + break; + case 'duration': + string_addFormat(res, 'duration', check.message, refs); + break; + case 'length': + errorMessages_setResponseValueAndErrors(res, 'minLength', typeof res.minLength === 'number' ? Math.max(res.minLength, check.value) : check.value, check.message, refs); + errorMessages_setResponseValueAndErrors(res, 'maxLength', typeof res.maxLength === 'number' ? 
Math.min(res.maxLength, check.value) : check.value, check.message, refs); + break; + case 'includes': { + string_addPattern(res, RegExp(processPattern(check.value)), check.message, refs); + break; + } + case 'ip': { + if (check.version !== 'v6') { + string_addFormat(res, 'ipv4', check.message, refs); + } + if (check.version !== 'v4') { + string_addFormat(res, 'ipv6', check.message, refs); + } + break; + } + case 'emoji': + string_addPattern(res, string_zodPatterns.emoji, check.message, refs); + break; + case 'ulid': { + string_addPattern(res, string_zodPatterns.ulid, check.message, refs); + break; + } + case 'base64': { + switch (refs.base64Strategy) { + case 'format:binary': { + string_addFormat(res, 'binary', check.message, refs); + break; + } + case 'contentEncoding:base64': { + errorMessages_setResponseValueAndErrors(res, 'contentEncoding', 'base64', check.message, refs); + break; + } + case 'pattern:zod': { + string_addPattern(res, string_zodPatterns.base64, check.message, refs); + break; + } + } + break; + } + case 'nanoid': { + string_addPattern(res, string_zodPatterns.nanoid, check.message, refs); + } + case 'toLowerCase': + case 'toUpperCase': + case 'trim': + break; + default: + ((_) => { })(check); + } + } + } + return res; +} +const string_escapeNonAlphaNumeric = (value) => Array.from(value) + .map((c) => (/[a-zA-Z0-9]/.test(c) ? c : `\\${c}`)) + .join(''); +const string_addFormat = (schema, value, message, refs) => { + if (schema.format || schema.anyOf?.some((x) => x.format)) { + if (!schema.anyOf) { + schema.anyOf = []; + } + if (schema.format) { + schema.anyOf.push({ + format: schema.format, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { format: schema.errorMessage.format }, + }), + }); + delete schema.format; + if (schema.errorMessage) { + delete schema.errorMessage.format; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.anyOf.push({ + format: value, + ...(message && refs.errorMessages && { errorMessage: { format: message } }), + }); + } + else { + errorMessages_setResponseValueAndErrors(schema, 'format', value, message, refs); + } +}; +const string_addPattern = (schema, regex, message, refs) => { + if (schema.pattern || schema.allOf?.some((x) => x.pattern)) { + if (!schema.allOf) { + schema.allOf = []; + } + if (schema.pattern) { + schema.allOf.push({ + pattern: schema.pattern, + ...(schema.errorMessage && + refs.errorMessages && { + errorMessage: { pattern: schema.errorMessage.pattern }, + }), + }); + delete schema.pattern; + if (schema.errorMessage) { + delete schema.errorMessage.pattern; + if (Object.keys(schema.errorMessage).length === 0) { + delete schema.errorMessage; + } + } + } + schema.allOf.push({ + pattern: processRegExp(regex, refs), + ...(message && refs.errorMessages && { errorMessage: { pattern: message } }), + }); + } + else { + errorMessages_setResponseValueAndErrors(schema, 'pattern', processRegExp(regex, refs), message, refs); + } +}; +// Mutate z.string.regex() in a best attempt to accommodate for regex flags when applyRegexFlags is true +const processRegExp = (regexOrFunction, refs) => { + const regex = typeof regexOrFunction === 'function' ? 
regexOrFunction() : regexOrFunction; + if (!refs.applyRegexFlags || !regex.flags) + return regex.source; + // Currently handled flags + const flags = { + i: regex.flags.includes('i'), + m: regex.flags.includes('m'), + s: regex.flags.includes('s'), // `.` matches newlines + }; + // The general principle here is to step through each character, one at a time, applying mutations as flags require. We keep track when the current character is escaped, and when it's inside a group /like [this]/ or (also) a range like /[a-z]/. The following is fairly brittle imperative code; edit at your peril! + const source = flags.i ? regex.source.toLowerCase() : regex.source; + let pattern = ''; + let isEscaped = false; + let inCharGroup = false; + let inCharRange = false; + for (let i = 0; i < source.length; i++) { + if (isEscaped) { + pattern += source[i]; + isEscaped = false; + continue; + } + if (flags.i) { + if (inCharGroup) { + if (source[i].match(/[a-z]/)) { + if (inCharRange) { + pattern += source[i]; + pattern += `${source[i - 2]}-${source[i]}`.toUpperCase(); + inCharRange = false; + } + else if (source[i + 1] === '-' && source[i + 2]?.match(/[a-z]/)) { + pattern += source[i]; + inCharRange = true; + } + else { + pattern += `${source[i]}${source[i].toUpperCase()}`; + } + continue; + } + } + else if (source[i].match(/[a-z]/)) { + pattern += `[${source[i]}${source[i].toUpperCase()}]`; + continue; + } + } + if (flags.m) { + if (source[i] === '^') { + pattern += `(^|(?<=[\r\n]))`; + continue; + } + else if (source[i] === '$') { + pattern += `($|(?=[\r\n]))`; + continue; + } + } + if (flags.s && source[i] === '.') { + pattern += inCharGroup ? `${source[i]}\r\n` : `[${source[i]}\r\n]`; + continue; + } + pattern += source[i]; + if (source[i] === '\\') { + isEscaped = true; + } + else if (inCharGroup && source[i] === ']') { + inCharGroup = false; + } + else if (!inCharGroup && source[i] === '[') { + inCharGroup = true; + } + } + try { + const regexTest = new RegExp(pattern); + } + catch { + console.warn(`Could not convert regex pattern at ${refs.currentPath.join('/')} to a flag-independent form! Falling back to the flag-ignorant source`); + return regex.source; + } + return pattern; +}; +//# sourceMappingURL=string.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/record.mjs + + + +function record_parseRecordDef(def, refs) { + if (refs.target === 'openApi3' && def.keyType?._def.typeName === lib_ZodFirstPartyTypeKind.ZodEnum) { + return { + type: 'object', + required: def.keyType._def.values, + properties: def.keyType._def.values.reduce((acc, key) => ({ + ...acc, + [key]: parseDef_parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', key], + }) ?? {}, + }), {}), + additionalProperties: false, + }; + } + const schema = { + type: 'object', + additionalProperties: parseDef_parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? {}, + }; + if (refs.target === 'openApi3') { + return schema; + } + if (def.keyType?._def.typeName === lib_ZodFirstPartyTypeKind.ZodString && def.keyType._def.checks?.length) { + const keyType = Object.entries(string_parseStringDef(def.keyType._def, refs)).reduce((acc, [key, value]) => (key === 'type' ? 
acc : { ...acc, [key]: value }), {}); + return { + ...schema, + propertyNames: keyType, + }; + } + else if (def.keyType?._def.typeName === lib_ZodFirstPartyTypeKind.ZodEnum) { + return { + ...schema, + propertyNames: { + enum: def.keyType._def.values, + }, + }; + } + return schema; +} +//# sourceMappingURL=record.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/map.mjs + + +function map_parseMapDef(def, refs) { + if (refs.mapStrategy === 'record') { + return record_parseRecordDef(def, refs); + } + const keys = parseDef_parseDef(def.keyType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '0'], + }) || {}; + const values = parseDef_parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', 'items', '1'], + }) || {}; + return { + type: 'array', + maxItems: 125, + items: { + type: 'array', + items: [keys, values], + minItems: 2, + maxItems: 2, + }, + }; +} +//# sourceMappingURL=map.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/nativeEnum.mjs +function nativeEnum_parseNativeEnumDef(def) { + const object = def.values; + const actualKeys = Object.keys(def.values).filter((key) => { + return typeof object[object[key]] !== 'number'; + }); + const actualValues = actualKeys.map((key) => object[key]); + const parsedTypes = Array.from(new Set(actualValues.map((values) => typeof values))); + return { + type: parsedTypes.length === 1 ? + parsedTypes[0] === 'string' ? + 'string' + : 'number' + : ['string', 'number'], + enum: actualValues, + }; +} +//# sourceMappingURL=nativeEnum.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/never.mjs +function never_parseNeverDef() { + return { + not: {}, + }; +} +//# sourceMappingURL=never.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/null.mjs +function null_parseNullDef(refs) { + return refs.target === 'openApi3' ? + { + enum: ['null'], + nullable: true, + } + : { + type: 'null', + }; +} +//# sourceMappingURL=null.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/union.mjs + +const union_primitiveMappings = { + ZodString: 'string', + ZodNumber: 'number', + ZodBigInt: 'integer', + ZodBoolean: 'boolean', + ZodNull: 'null', +}; +function union_parseUnionDef(def, refs) { + if (refs.target === 'openApi3') + return union_asAnyOf(def, refs); + const options = def.options instanceof Map ? Array.from(def.options.values()) : def.options; + // This blocks tries to look ahead a bit to produce nicer looking schemas with type array instead of anyOf. + if (options.every((x) => x._def.typeName in union_primitiveMappings && (!x._def.checks || !x._def.checks.length))) { + // all types in union are primitive and lack checks, so might as well squash into {type: [...]} + const types = options.reduce((types, x) => { + const type = union_primitiveMappings[x._def.typeName]; //Can be safely casted due to row 43 + return type && !types.includes(type) ? [...types, type] : types; + }, []); + return { + type: types.length > 1 ? 
types : types[0], + }; + } + else if (options.every((x) => x._def.typeName === 'ZodLiteral' && !x.description)) { + // all options literals + const types = options.reduce((acc, x) => { + const type = typeof x._def.value; + switch (type) { + case 'string': + case 'number': + case 'boolean': + return [...acc, type]; + case 'bigint': + return [...acc, 'integer']; + case 'object': + if (x._def.value === null) + return [...acc, 'null']; + case 'symbol': + case 'undefined': + case 'function': + default: + return acc; + } + }, []); + if (types.length === options.length) { + // all the literals are primitive, as far as null can be considered primitive + const uniqueTypes = types.filter((x, i, a) => a.indexOf(x) === i); + return { + type: uniqueTypes.length > 1 ? uniqueTypes : uniqueTypes[0], + enum: options.reduce((acc, x) => { + return acc.includes(x._def.value) ? acc : [...acc, x._def.value]; + }, []), + }; + } + } + else if (options.every((x) => x._def.typeName === 'ZodEnum')) { + return { + type: 'string', + enum: options.reduce((acc, x) => [...acc, ...x._def.values.filter((x) => !acc.includes(x))], []), + }; + } + return union_asAnyOf(def, refs); +} +const union_asAnyOf = (def, refs) => { + const anyOf = (def.options instanceof Map ? Array.from(def.options.values()) : def.options) + .map((x, i) => parseDef_parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', `${i}`], + })) + .filter((x) => !!x && (!refs.strictUnions || (typeof x === 'object' && Object.keys(x).length > 0))); + return anyOf.length ? { anyOf } : undefined; +}; +//# sourceMappingURL=union.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/nullable.mjs + + +function nullable_parseNullableDef(def, refs) { + if (['ZodString', 'ZodNumber', 'ZodBigInt', 'ZodBoolean', 'ZodNull'].includes(def.innerType._def.typeName) && + (!def.innerType._def.checks || !def.innerType._def.checks.length)) { + if (refs.target === 'openApi3' || refs.nullableStrategy === 'property') { + return { + type: union_primitiveMappings[def.innerType._def.typeName], + nullable: true, + }; + } + return { + type: [union_primitiveMappings[def.innerType._def.typeName], 'null'], + }; + } + if (refs.target === 'openApi3') { + const base = parseDef_parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath], + }); + if (base && '$ref' in base) + return { allOf: [base], nullable: true }; + return base && { ...base, nullable: true }; + } + const base = parseDef_parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '0'], + }); + return base && { anyOf: [base, { type: 'null' }] }; +} +//# sourceMappingURL=nullable.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/number.mjs + +function number_parseNumberDef(def, refs) { + const res = { + type: 'number', + }; + if (!def.checks) + return res; + for (const check of def.checks) { + switch (check.kind) { + case 'int': + res.type = 'integer'; + errorMessages_addErrorMessage(res, 'type', check.message, refs); + break; + case 'min': + if (refs.target === 'jsonSchema7') { + if (check.inclusive) { + errorMessages_setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + else { + errorMessages_setResponseValueAndErrors(res, 'exclusiveMinimum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMinimum = true; + } + errorMessages_setResponseValueAndErrors(res, 'minimum', check.value, check.message, refs); + } + break; + case 'max': + if (refs.target === 
'jsonSchema7') { + if (check.inclusive) { + errorMessages_setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + else { + errorMessages_setResponseValueAndErrors(res, 'exclusiveMaximum', check.value, check.message, refs); + } + } + else { + if (!check.inclusive) { + res.exclusiveMaximum = true; + } + errorMessages_setResponseValueAndErrors(res, 'maximum', check.value, check.message, refs); + } + break; + case 'multipleOf': + errorMessages_setResponseValueAndErrors(res, 'multipleOf', check.value, check.message, refs); + break; + } + } + return res; +} +//# sourceMappingURL=number.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/object.mjs + +function object_decideAdditionalProperties(def, refs) { + if (refs.removeAdditionalStrategy === 'strict') { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys !== 'strict' + : parseDef_parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } + else { + return def.catchall._def.typeName === 'ZodNever' ? + def.unknownKeys === 'passthrough' + : parseDef_parseDef(def.catchall._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalProperties'], + }) ?? true; + } +} +function object_parseObjectDef(def, refs) { + const result = { + type: 'object', + ...Object.entries(def.shape()).reduce((acc, [propName, propDef]) => { + if (propDef === undefined || propDef._def === undefined) + return acc; + const parsedDef = parseDef_parseDef(propDef._def, { + ...refs, + currentPath: [...refs.currentPath, 'properties', propName], + propertyPath: [...refs.currentPath, 'properties', propName], + }); + if (parsedDef === undefined) + return acc; + return { + properties: { + ...acc.properties, + [propName]: parsedDef, + }, + required: propDef.isOptional() && !refs.openaiStrictMode ? acc.required : [...acc.required, propName], + }; + }, { properties: {}, required: [] }), + additionalProperties: object_decideAdditionalProperties(def, refs), + }; + if (!result.required.length) + delete result.required; + return result; +} +//# sourceMappingURL=object.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/optional.mjs + +const optional_parseOptionalDef = (def, refs) => { + if (refs.currentPath.toString() === refs.propertyPath?.toString()) { + return parseDef_parseDef(def.innerType._def, refs); + } + const innerSchema = parseDef_parseDef(def.innerType._def, { + ...refs, + currentPath: [...refs.currentPath, 'anyOf', '1'], + }); + return innerSchema ? + { + anyOf: [ + { + not: {}, + }, + innerSchema, + ], + } + : {}; +}; +//# sourceMappingURL=optional.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/pipeline.mjs + +const pipeline_parsePipelineDef = (def, refs) => { + if (refs.pipeStrategy === 'input') { + return parseDef_parseDef(def.in._def, refs); + } + else if (refs.pipeStrategy === 'output') { + return parseDef_parseDef(def.out._def, refs); + } + const a = parseDef_parseDef(def.in._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', '0'], + }); + const b = parseDef_parseDef(def.out._def, { + ...refs, + currentPath: [...refs.currentPath, 'allOf', a ? 
'1' : '0'], + }); + return { + allOf: [a, b].filter((x) => x !== undefined), + }; +}; +//# sourceMappingURL=pipeline.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/promise.mjs + +function promise_parsePromiseDef(def, refs) { + return parseDef_parseDef(def.type._def, refs); +} +//# sourceMappingURL=promise.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/set.mjs + + +function set_parseSetDef(def, refs) { + const items = parseDef_parseDef(def.valueType._def, { + ...refs, + currentPath: [...refs.currentPath, 'items'], + }); + const schema = { + type: 'array', + uniqueItems: true, + items, + }; + if (def.minSize) { + errorMessages_setResponseValueAndErrors(schema, 'minItems', def.minSize.value, def.minSize.message, refs); + } + if (def.maxSize) { + errorMessages_setResponseValueAndErrors(schema, 'maxItems', def.maxSize.value, def.maxSize.message, refs); + } + return schema; +} +//# sourceMappingURL=set.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/tuple.mjs + +function tuple_parseTupleDef(def, refs) { + if (def.rest) { + return { + type: 'array', + minItems: def.items.length, + items: def.items + .map((x, i) => parseDef_parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? acc : [...acc, x]), []), + additionalItems: parseDef_parseDef(def.rest._def, { + ...refs, + currentPath: [...refs.currentPath, 'additionalItems'], + }), + }; + } + else { + return { + type: 'array', + minItems: def.items.length, + maxItems: def.items.length, + items: def.items + .map((x, i) => parseDef_parseDef(x._def, { + ...refs, + currentPath: [...refs.currentPath, 'items', `${i}`], + })) + .reduce((acc, x) => (x === undefined ? acc : [...acc, x]), []), + }; + } +} +//# sourceMappingURL=tuple.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/undefined.mjs +function undefined_parseUndefinedDef() { + return { + not: {}, + }; +} +//# sourceMappingURL=undefined.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/unknown.mjs +function unknown_parseUnknownDef() { + return {}; +} +//# sourceMappingURL=unknown.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parsers/readonly.mjs + +const readonly_parseReadonlyDef = (def, refs) => { + return parseDef_parseDef(def.innerType._def, refs); +}; +//# sourceMappingURL=readonly.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/Options.mjs +const Options_ignoreOverride = Symbol('Let zodToJsonSchema decide on which parser to use'); +const Options_defaultOptions = { + name: undefined, + $refStrategy: 'root', + effectStrategy: 'input', + pipeStrategy: 'all', + dateStrategy: 'format:date-time', + mapStrategy: 'entries', + nullableStrategy: 'from-target', + removeAdditionalStrategy: 'passthrough', + definitionPath: 'definitions', + target: 'jsonSchema7', + strictUnions: false, + errorMessages: false, + markdownDescription: false, + patternStrategy: 'escape', + applyRegexFlags: false, + emailStrategy: 'format:email', + base64Strategy: 'contentEncoding:base64', + nameStrategy: 'ref', +}; +const Options_getDefaultOptions = (options) => { + // We need to add `definitions` here as we may mutate it + return (typeof options === 'string' ? 
+ { + ...Options_defaultOptions, + basePath: ['#'], + definitions: {}, + name: options, + } + : { + ...Options_defaultOptions, + basePath: ['#'], + definitions: {}, + ...options, + }); +}; +//# sourceMappingURL=Options.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/parseDef.mjs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +function parseDef_parseDef(def, refs, forceResolution = false) { + const seenItem = refs.seen.get(def); + if (refs.override) { + const overrideResult = refs.override?.(def, refs, seenItem, forceResolution); + if (overrideResult !== Options_ignoreOverride) { + return overrideResult; + } + } + if (seenItem && !forceResolution) { + const seenSchema = parseDef_get$ref(seenItem, refs); + if (seenSchema !== undefined) { + if ('$ref' in seenSchema) { + refs.seenRefs.add(seenSchema.$ref); + } + return seenSchema; + } + } + const newItem = { def, path: refs.currentPath, jsonSchema: undefined }; + refs.seen.set(def, newItem); + const jsonSchema = parseDef_selectParser(def, def.typeName, refs, forceResolution); + if (jsonSchema) { + parseDef_addMeta(def, refs, jsonSchema); + } + newItem.jsonSchema = jsonSchema; + return jsonSchema; +} +const parseDef_get$ref = (item, refs) => { + switch (refs.$refStrategy) { + case 'root': + return { $ref: item.path.join('/') }; + // this case is needed as OpenAI strict mode doesn't support top-level `$ref`s, i.e. + // the top-level schema *must* be `{"type": "object", "properties": {...}}` but if we ever + // need to define a `$ref`, relative `$ref`s aren't supported, so we need to extract + // the schema to `#/definitions/` and reference that. + // + // e.g. if we need to reference a schema at + // `["#","definitions","contactPerson","properties","person1","properties","name"]` + // then we'll extract it out to `contactPerson_properties_person1_properties_name` + case 'extract-to-root': + const name = item.path.slice(refs.basePath.length + 1).join('_'); + // we don't need to extract the root schema in this case, as it's already + // been added to the definitions + if (name !== refs.name && refs.nameStrategy === 'duplicate-ref') { + refs.definitions[name] = item.def; + } + return { $ref: [...refs.basePath, refs.definitionPath, name].join('/') }; + case 'relative': + return { $ref: parseDef_getRelativePath(refs.currentPath, item.path) }; + case 'none': + case 'seen': { + if (item.path.length < refs.currentPath.length && + item.path.every((value, index) => refs.currentPath[index] === value)) { + console.warn(`Recursive reference detected at ${refs.currentPath.join('/')}! Defaulting to any`); + return {}; + } + return refs.$refStrategy === 'seen' ? 
{} : undefined; + } + } +}; +const parseDef_getRelativePath = (pathA, pathB) => { + let i = 0; + for (; i < pathA.length && i < pathB.length; i++) { + if (pathA[i] !== pathB[i]) + break; + } + return [(pathA.length - i).toString(), ...pathB.slice(i)].join('/'); +}; +const parseDef_selectParser = (def, typeName, refs, forceResolution) => { + switch (typeName) { + case lib_ZodFirstPartyTypeKind.ZodString: + return string_parseStringDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodNumber: + return number_parseNumberDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodObject: + return object_parseObjectDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodBigInt: + return bigint_parseBigintDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodBoolean: + return boolean_parseBooleanDef(); + case lib_ZodFirstPartyTypeKind.ZodDate: + return date_parseDateDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodUndefined: + return undefined_parseUndefinedDef(); + case lib_ZodFirstPartyTypeKind.ZodNull: + return null_parseNullDef(refs); + case lib_ZodFirstPartyTypeKind.ZodArray: + return array_parseArrayDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodUnion: + case lib_ZodFirstPartyTypeKind.ZodDiscriminatedUnion: + return union_parseUnionDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodIntersection: + return intersection_parseIntersectionDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodTuple: + return tuple_parseTupleDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodRecord: + return record_parseRecordDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodLiteral: + return literal_parseLiteralDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodEnum: + return enum_parseEnumDef(def); + case lib_ZodFirstPartyTypeKind.ZodNativeEnum: + return nativeEnum_parseNativeEnumDef(def); + case lib_ZodFirstPartyTypeKind.ZodNullable: + return nullable_parseNullableDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodOptional: + return optional_parseOptionalDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodMap: + return map_parseMapDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodSet: + return set_parseSetDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodLazy: + return parseDef_parseDef(def.getter()._def, refs); + case lib_ZodFirstPartyTypeKind.ZodPromise: + return promise_parsePromiseDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodNaN: + case lib_ZodFirstPartyTypeKind.ZodNever: + return never_parseNeverDef(); + case lib_ZodFirstPartyTypeKind.ZodEffects: + return effects_parseEffectsDef(def, refs, forceResolution); + case lib_ZodFirstPartyTypeKind.ZodAny: + return any_parseAnyDef(); + case lib_ZodFirstPartyTypeKind.ZodUnknown: + return unknown_parseUnknownDef(); + case lib_ZodFirstPartyTypeKind.ZodDefault: + return default_parseDefaultDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodBranded: + return branded_parseBrandedDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodReadonly: + return readonly_parseReadonlyDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodCatch: + return catch_parseCatchDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodPipeline: + return pipeline_parsePipelineDef(def, refs); + case lib_ZodFirstPartyTypeKind.ZodFunction: + case lib_ZodFirstPartyTypeKind.ZodVoid: + case lib_ZodFirstPartyTypeKind.ZodSymbol: + return undefined; + default: + return ((_) => undefined)(typeName); + } +}; +const parseDef_addMeta = (def, refs, jsonSchema) => { + if (def.description) { + jsonSchema.description = def.description; + if (refs.markdownDescription) { + jsonSchema.markdownDescription = 
def.description; + } + } + return jsonSchema; +}; +//# sourceMappingURL=parseDef.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/util.mjs +const zodDef = (zodSchema) => { + return '_def' in zodSchema ? zodSchema._def : zodSchema; +}; +function util_isEmptyObj(obj) { + if (!obj) + return true; + for (const _k in obj) + return false; + return true; +} +//# sourceMappingURL=util.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/Refs.mjs + + +const Refs_getRefs = (options) => { + const _options = Options_getDefaultOptions(options); + const currentPath = _options.name !== undefined ? + [..._options.basePath, _options.definitionPath, _options.name] + : _options.basePath; + return { + ..._options, + currentPath: currentPath, + propertyPath: undefined, + seenRefs: new Set(), + seen: new Map(Object.entries(_options.definitions).map(([name, def]) => [ + zodDef(def), + { + def: zodDef(def), + path: [..._options.basePath, _options.definitionPath, name], + // Resolution of references will be forced even though seen, so it's ok that the schema is undefined here for now. + jsonSchema: undefined, + }, + ])), + }; +}; +//# sourceMappingURL=Refs.mjs.map +;// ./node_modules/openai/_vendor/zod-to-json-schema/zodToJsonSchema.mjs + + + +const zod_to_json_schema_zodToJsonSchema_zodToJsonSchema = (schema, options) => { + const refs = Refs_getRefs(options); + const name = typeof options === 'string' ? options + : options?.nameStrategy === 'title' ? undefined + : options?.name; + const main = parseDef_parseDef(schema._def, name === undefined ? refs : ({ + ...refs, + currentPath: [...refs.basePath, refs.definitionPath, name], + }), false) ?? {}; + const title = typeof options === 'object' && options.name !== undefined && options.nameStrategy === 'title' ? + options.name + : undefined; + if (title !== undefined) { + main.title = title; + } + const definitions = (() => { + if (util_isEmptyObj(refs.definitions)) { + return undefined; + } + const definitions = {}; + const processedDefinitions = new Set(); + // the call to `parseDef()` here might itself add more entries to `.definitions` + // so we need to continually evaluate definitions until we've resolved all of them + // + // we have a generous iteration limit here to avoid blowing up the stack if there + // are any bugs that would otherwise result in us iterating indefinitely + for (let i = 0; i < 500; i++) { + const newDefinitions = Object.entries(refs.definitions).filter(([key]) => !processedDefinitions.has(key)); + if (newDefinitions.length === 0) + break; + for (const [key, schema] of newDefinitions) { + definitions[key] = + parseDef_parseDef(zodDef(schema), { ...refs, currentPath: [...refs.basePath, refs.definitionPath, key] }, true) ?? {}; + processedDefinitions.add(key); + } + } + return definitions; + })(); + const combined = name === undefined ? + definitions ? + { + ...main, + [refs.definitionPath]: definitions, + } + : main + : refs.nameStrategy === 'duplicate-ref' ? + { + ...main, + ...(definitions || refs.seenRefs.size ? + { + [refs.definitionPath]: { + ...definitions, + // only actually duplicate the schema definition if it was ever referenced + // otherwise the duplication is completely pointless + ...(refs.seenRefs.size ? { [name]: main } : undefined), + }, + } + : undefined), + } + : { + $ref: [...(refs.$refStrategy === 'relative' ? 
[] : refs.basePath), refs.definitionPath, name].join('/'), + [refs.definitionPath]: { + ...definitions, + [name]: main, + }, + }; + if (refs.target === 'jsonSchema7') { + combined.$schema = 'http://json-schema.org/draft-07/schema#'; + } + else if (refs.target === 'jsonSchema2019-09') { + combined.$schema = 'https://json-schema.org/draft/2019-09/schema#'; + } + return combined; +}; + +//# sourceMappingURL=zodToJsonSchema.mjs.map +;// ./node_modules/openai/helpers/zod.mjs + + + +function zod_zodToJsonSchema(schema, options) { + return zod_to_json_schema_zodToJsonSchema_zodToJsonSchema(schema, { + openaiStrictMode: true, + name: options.name, + nameStrategy: 'duplicate-ref', + $refStrategy: 'extract-to-root', + nullableStrategy: 'property', + }); +} +/** + * Creates a chat completion `JSONSchema` response format object from + * the given Zod schema. + * + * If this is passed to the `.parse()`, `.stream()` or `.runTools()` + * chat completion methods then the response message will contain a + * `.parsed` property that is the result of parsing the content with + * the given Zod object. + * + * ```ts + * const completion = await client.beta.chat.completions.parse({ + * model: 'gpt-4o-2024-08-06', + * messages: [ + * { role: 'system', content: 'You are a helpful math tutor.' }, + * { role: 'user', content: 'solve 8x + 31 = 2' }, + * ], + * response_format: zodResponseFormat( + * z.object({ + * steps: z.array(z.object({ + * explanation: z.string(), + * answer: z.string(), + * })), + * final_answer: z.string(), + * }), + * 'math_answer', + * ), + * }); + * const message = completion.choices[0]?.message; + * if (message?.parsed) { + * console.log(message.parsed); + * console.log(message.parsed.final_answer); + * } + * ``` + * + * This can be passed directly to the `.create()` method but will not + * result in any automatic parsing, you'll have to parse the response yourself. + */ +function zodResponseFormat(zodObject, name, props) { + return makeParseableResponseFormat({ + type: 'json_schema', + json_schema: { + ...props, + name, + strict: true, + schema: zod_zodToJsonSchema(zodObject, { name }), + }, + }, (content) => zodObject.parse(JSON.parse(content))); +} +function zodTextFormat(zodObject, name, props) { + return makeParseableTextFormat({ + type: 'json_schema', + ...props, + name, + strict: true, + schema: zod_zodToJsonSchema(zodObject, { name }), + }, (content) => zodObject.parse(JSON.parse(content))); +} +/** + * Creates a chat completion `function` tool that can be invoked + * automatically by the chat completion `.runTools()` method or automatically + * parsed by `.parse()` / `.stream()`. + */ +function zodFunction(options) { + // @ts-expect-error TODO + return makeParseableTool({ + type: 'function', + function: { + name: options.name, + parameters: zod_zodToJsonSchema(options.parameters, { name: options.name }), + strict: true, + ...(options.description ? { description: options.description } : undefined), + }, + }, { + callback: options.function, + parser: (args) => options.parameters.parse(JSON.parse(args)), + }); +} +function zodResponsesFunction(options) { + return makeParseableResponseTool({ + type: 'function', + name: options.name, + parameters: zod_zodToJsonSchema(options.parameters, { name: options.name }), + strict: true, + ...(options.description ? 
{ description: options.description } : undefined), + }, { + callback: options.function, + parser: (args) => options.parameters.parse(JSON.parse(args)), + }); +} +//# sourceMappingURL=zod.mjs.map ;// ./dist-in/constants.js const constants_MODULE_NAME = 'kbot'; const EXCLUDE_GLOB = [ @@ -164399,7 +169985,7 @@ const external_node_crypto_namespaceObject = require("node:crypto"); -const { getType } = mime; +const { getType: inspect_getType } = mime; @@ -164468,7 +170054,7 @@ const createInspectObj = (path, options, stat) => { obj.mime = 'inode/socket'; } else { - obj.mime = getType(path); + obj.mime = inspect_getType(path); } } if (options.times) { @@ -164532,7 +170118,7 @@ const inspect_async = async (path, options) => { const stat = await (options.symlinks ? fs.promises.lstat : fs.promises.statfs)(path); return addExtraFieldsSync(path, createInspectObj(path, options, stat), options); }; - +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5zcGVjdC5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9pbnNwZWN0LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxFQUFFLE1BQU0sU0FBUyxDQUFBO0FBQzdCLE9BQU8sRUFBUyxZQUFZLEVBQUUsUUFBUSxFQUFFLFNBQVMsRUFBRSxZQUFZLEVBQUUsTUFBTSxTQUFTLENBQUE7QUFFaEYsT0FBTyxHQUFHLE1BQU0sTUFBTSxDQUFDO0FBQ3ZCLE1BQU0sRUFBRSxPQUFPLEVBQUUsR0FBRyxHQUFHLENBQUM7QUFFeEIsT0FBTyxLQUFNLFFBQVEsTUFBTSxXQUFXLENBQUE7QUFDdEMsT0FBTyxFQUFFLFVBQVUsRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUN4QyxPQUFPLEVBQUUsZ0JBQWdCLEVBQUUsZUFBZSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDdkUsT0FBTyxFQUFFLFNBQVMsRUFBMEIsTUFBTSxpQkFBaUIsQ0FBQTtBQUVuRSxNQUFNLENBQUMsTUFBTSwyQkFBMkIsR0FBYSxDQUFDLEtBQUssRUFBRSxNQUFNLEVBQUUsUUFBUSxFQUFFLFFBQVEsQ0FBQyxDQUFBO0FBRXhGLE1BQU0sVUFBVSxxQkFBcUI7SUFDcEMsT0FBTztRQUNOLEtBQUssRUFBRSxJQUFJO1FBQ1gsSUFBSSxFQUFFLElBQUk7S0FDVixDQUFDO0FBQ0gsQ0FBQztBQUNELE1BQU0sVUFBVSxhQUFhLENBQUMsVUFBa0IsRUFBRSxJQUFZLEVBQUUsT0FBeUI7SUFDeEYsTUFBTSxlQUFlLEdBQVcsVUFBVSxHQUFHLG1CQUFtQixDQUFDO0lBQ2pFLGdCQUFnQixDQUFDLGVBQWUsRUFBRSxNQUFNLEVBQUUsSUFBSSxFQUFFLENBQUMsUUFBUSxDQUFDLENBQUMsQ0FBQztJQUM1RCxlQUFlLENBQUMsZUFBZSxFQUFFLFNBQVMsRUFBRSxPQUFPLEVBQUU7UUFDcEQsUUFBUSxFQUFFLENBQUMsUUFBUSxDQUFDO1FBQ3BCLElBQUksRUFBRSxDQUFDLFNBQVMsQ0FBQztRQUNqQixLQUFLLEVBQUUsQ0FBQyxTQUFTLENBQUM7UUFDbEIsWUFBWSxFQUFFLENBQUMsU0FBUyxDQUFDO1FBQ3pCLFFBQVEsRUFBRSxDQUFDLFNBQVMsQ0FBQztRQUNyQixJQUFJLEVBQUUsUUFBUTtRQUNkLElBQUksRUFBRSxRQUFRO0tBQ2QsQ0FBQyxDQUFDO0lBRUgsSUFBSSxPQUFPLElBQUksT0FBTyxDQUFDLFFBQVEsS0FBSyxTQUFTO1dBQ3pDLENBQUMsMkJBQTJCLENBQUMsUUFBUSxDQUFDLE9BQU8sQ0FBQyxRQUFRLENBQUMsRUFBRSxDQUFDO1FBQzdELE1BQU0sSUFBSSxLQUFLLENBQUMsd0NBQXdDLEdBQUcsZUFBZTtjQUN2RSw0QkFBNEIsR0FBRywyQkFBMkIsQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQztJQUMzRSxDQUFDO0FBQ0YsQ0FBQztBQUVELE1BQU0sZ0JBQWdCLEdBQUcsQ0FBQyxJQUFZLEVBQUUsT0FBd0IsRUFBRSxJQUFjLEVBQVMsRUFBRTtJQUMxRixNQUFNLEdBQUcsR0FBVSxFQUFXLENBQUE7SUFDOUIsR0FBRyxDQUFDLElBQUksR0FBRyxRQUFRLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxDQUFBO0lBQ2xDLElBQUksSUFBSSxDQUFDLE1BQU0sRUFBRSxFQUFFLENBQUM7UUFDbkIsR0FBRyxDQUFDLElBQUksR0FBRyxTQUFTLENBQUMsSUFBSSxDQUFDO1FBQzFCLEdBQUcsQ0FBQyxJQUFJLEdBQUcsSUFBSSxDQUFDLElBQUksQ0FBQztJQUN0QixDQUFDO1NBQU0sSUFBSSxJQUFJLENBQUMsV0FBVyxFQUFFLEVBQUUsQ0FBQztRQUMvQixHQUFHLENBQUMsSUFBSSxHQUFHLFNBQVMsQ0FBQyxHQUFHLENBQUM7SUFDMUIsQ0FBQztTQUFNLElBQUksSUFBSSxDQUFDLGNBQWMsRUFBRSxFQUFFLENBQUM7UUFDbEMsR0FBRyxDQUFDLElBQUksR0FBRyxTQUFTLENBQUMsT0FBTyxDQUFDO0lBQzlCLENBQUM7U0FBTSxDQUFDO1FBQ1AsR0FBRyxDQUFDLElBQUksR0FBRyxTQUFTLENBQUMsS0FBSyxDQUFDO0lBQzVCLENBQUM7SUFDRCxJQUFJLE9BQU8sQ0FBQyxJQUFJLEVBQUUsQ0FBQztRQUNsQixHQUFHLENBQUMsSUFBSSxHQUFHLElBQUksQ0FBQyxJQUFJLENBQUM7SUFDdEIsQ0FBQztJQUNELElBQUksT
0FBTyxDQUFDLElBQUksRUFBRSxDQUFDO1FBQ2xCLElBQUksSUFBSSxDQUFDLFdBQVcsRUFBRSxFQUFFLENBQUM7WUFDeEIsR0FBRyxDQUFDLElBQUksR0FBRyxpQkFBaUIsQ0FBQztRQUM5QixDQUFDO2FBQU0sSUFBSSxJQUFJLENBQUMsYUFBYSxFQUFFLEVBQUUsQ0FBQztZQUNqQyxHQUFHLENBQUMsSUFBSSxHQUFHLG1CQUFtQixDQUFDO1FBQ2hDLENBQUM7YUFBTSxJQUFJLElBQUksQ0FBQyxpQkFBaUIsRUFBRSxFQUFFLENBQUM7WUFDckMsR0FBRyxDQUFDLElBQUksR0FBRyxrQkFBa0IsQ0FBQztRQUMvQixDQUFDO2FBQU0sSUFBSSxJQUFJLENBQUMsY0FBYyxFQUFFLEVBQUUsQ0FBQztZQUNsQyxHQUFHLENBQUMsSUFBSSxHQUFHLGVBQWUsQ0FBQztRQUM1QixDQUFDO2FBQU0sSUFBSSxJQUFJLENBQUMsTUFBTSxFQUFFLEVBQUUsQ0FBQztZQUMxQixHQUFHLENBQUMsSUFBSSxHQUFHLFlBQVksQ0FBQztRQUN6QixDQUFDO2FBQU0sSUFBSSxJQUFJLENBQUMsUUFBUSxFQUFFLEVBQUUsQ0FBQztZQUM1QixHQUFHLENBQUMsSUFBSSxHQUFHLGNBQWMsQ0FBQztRQUMzQixDQUFDO2FBQU0sQ0FBQztZQUNQLEdBQUcsQ0FBQyxJQUFJLEdBQUcsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDO1FBQzFCLENBQUM7SUFDRixDQUFDO0lBRUQsSUFBSSxPQUFPLENBQUMsS0FBSyxFQUFFLENBQUM7UUFDbkIsR0FBRyxDQUFDLFVBQVUsR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFBO1FBQzNCLEdBQUcsQ0FBQyxVQUFVLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQTtRQUMzQixHQUFHLENBQUMsVUFBVSxHQUFHLElBQUksQ0FBQyxLQUFLLENBQUE7UUFDM0IsR0FBRyxDQUFDLFNBQVMsR0FBRyxJQUFJLENBQUMsU0FBUyxDQUFBO0lBQy9CLENBQUM7SUFFRCxJQUFJLE9BQU8sQ0FBQyxZQUFZLEVBQUUsQ0FBQztRQUMxQixHQUFHLENBQUMsWUFBWSxHQUFHLElBQUksQ0FBQztJQUN6QixDQUFDO0lBQ0QsT0FBTyxHQUFHLENBQUM7QUFDWixDQUFDLENBQUM7QUFDRixNQUFNLFVBQVUsVUFBVSxDQUFDLElBQVksRUFBRSxPQUF5QjtJQUNqRSxPQUFPLEdBQUcsT0FBTyxJQUFJLHFCQUFxQixFQUFFLENBQUM7SUFDN0MsTUFBTSxJQUFJLEdBQUcsQ0FBQyxPQUFPLENBQUMsUUFBUSxDQUFDLENBQUMsQ0FBQyxTQUFTLENBQUMsQ0FBQyxDQUFDLFFBQVEsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDO0lBQzdELE9BQU8sZ0JBQWdCLENBQUMsSUFBSSxFQUFFLE9BQU8sRUFBRSxJQUFJLENBQUMsQ0FBQztBQUM5QyxDQUFDO0FBQ0QsNERBQTREO0FBQzVELE9BQU87QUFDUCw0REFBNEQ7QUFDNUQsTUFBTSxZQUFZLEdBQUcsQ0FBQyxJQUFZLEVBQUUsSUFBWSxFQUFVLEVBQUU7SUFDM0QsTUFBTSxJQUFJLEdBQUcsVUFBVSxDQUFDLElBQUksQ0FBQyxDQUFBO0lBQzdCLE1BQU0sSUFBSSxHQUFHLFlBQVksQ0FBQyxJQUFJLENBQUMsQ0FBQTtJQUMvQixJQUFJLENBQUMsTUFBTSxDQUFDLElBQUksQ0FBQyxDQUFBO0lBQ2pCLE9BQU8sSUFBSSxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsQ0FBQTtBQUMxQixDQUFDLENBQUM7QUFFRixNQUFNLGtCQUFrQixHQUFHLENBQUMsSUFBWSxFQUFFLFVBQWUsRUFBRSxPQUF3QixFQUFTLEVBQUU7SUFDN0YsSUFBSSxVQUFVLENBQUMsSUFBSSxLQUFLLFNBQVMsQ0FBQyxJQUFJLElBQUksT0FBTyxDQUFDLFFBQVEsRUFBRSxDQUFDO1FBQzVELFVBQVUsQ0FBQyxPQUFPLENBQUMsUUFBUSxDQUFDLEdBQUcsWUFBWSxDQUFDLElBQUksRUFBRSxPQUFPLENBQUMsUUFBUSxDQUFDLENBQUM7SUFDckUsQ0FBQztTQUFNLElBQUksVUFBVSxDQUFDLElBQUksS0FBSyxTQUFTLENBQUMsT0FBTyxFQUFFLENBQUM7UUFDbEQsVUFBVSxDQUFDLFFBQVEsR0FBRyxZQUFZLENBQUMsSUFBSSxDQUFDLENBQUM7SUFDMUMsQ0FBQztJQUNELE9BQU8sVUFBVSxDQUFDO0FBQ25CLENBQUMsQ0FBQztBQUVGLE1BQU0sVUFBVSxJQUFJLENBQUMsSUFBWSxFQUFFLE9BQXlCO0lBQzNELElBQUksYUFBYSxHQUFHLEVBQUUsQ0FBQyxTQUFTLENBQUE7SUFDaEMsSUFBSSxJQUFJLENBQUE7SUFDUixNQUFNLElBQUksR0FBRyxPQUFPLElBQUksRUFBRSxDQUFBO0lBRTFCLElBQUksSUFBSSxDQUFDLFFBQVEsS0FBSyxRQUFRLEVBQUUsQ0FBQztRQUNoQyxhQUFhLEdBQUcsRUFBRSxDQUFDLFFBQVEsQ0FBQTtJQUM1QixDQUFDO0lBRUQsSUFBSSxDQUFDO1FBQ0osSUFBSSxHQUFHLGFBQWEsQ0FBQyxJQUFJLENBQUMsQ0FBQTtJQUMzQixDQUFDO0lBQUMsT0FBTyxHQUFHLEVBQUUsQ0FBQztRQUNkLDJCQUEyQjtRQUMzQixJQUFJLEdBQUcsQ0FBQyxJQUFJLEtBQUssUUFBUSxFQUFFLENBQUM7WUFDM0IsdURBQXVEO1lBQ3ZELE9BQU8sU0FBUyxDQUFDO1FBQ2xCLENBQUM7UUFDRCxNQUFNLEdBQUcsQ0FBQztJQUNYLENBQUM7SUFFRCxNQUFNLFVBQVUsR0FBRyxnQkFBZ0IsQ0FBQyxJQUFJLEVBQUUsSUFBSSxFQUFFLElBQUksQ0FBQyxDQUFBO0lBQ3JELGtCQUFrQixDQUFDLElBQUksRUFBRSxVQUFVLEVBQUUsSUFBSSxDQUFDLENBQUE7SUFFMUMsT0FBTyxVQUFVLENBQUE7QUFDbEIsQ0FBQztBQUVELE1BQU0sQ0FBQyxNQUFNLEtBQUssR0FBRyxLQUFLLEVBQUUsSUFBWSxFQUFFLE9BQXlCLEVBQWtCLEVBQUU7SUFDdEYsT0FBTyxHQUFHLE9BQU8sSUFBSSxFQUFxQixDQUFDO0lBQzNDLE1BQU0sSUFBSSxHQUFHLE1BQU0sQ0FBQyxPQUFPLENB
QUMsUUFBUSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsUUFBUSxDQUFDLEtBQUssQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLFFBQVEsQ0FBQyxNQUFNLENBQUMsQ0FBQyxJQUFJLENBQUMsQ0FBQTtJQUNwRixPQUFPLGtCQUFrQixDQUFDLElBQUksRUFBRSxnQkFBZ0IsQ0FBQyxJQUFJLEVBQUUsT0FBTyxFQUFFLElBQWEsQ0FBQyxFQUFFLE9BQU8sQ0FBQyxDQUFBO0FBQ3pGLENBQUMsQ0FBQSJ9 ;// ../fs/dist/utils/platform.js var Platform; (function (Platform) { @@ -164576,7 +170162,7 @@ const isNative = (/* unused pure expression or super */ null && (_isNative)); const isWeb = (/* unused pure expression or super */ null && (_isWeb)); const isQunit = (/* unused pure expression or super */ null && (_isQunit)); const platform_platform = (/* unused pure expression or super */ null && (_platform)); - +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicGxhdGZvcm0uanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvdXRpbHMvcGxhdGZvcm0udHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFOLElBQVksUUFLWDtBQUxELFdBQVksUUFBUTtJQUNuQixxQ0FBRyxDQUFBO0lBQ0gscUNBQUcsQ0FBQTtJQUNILHlDQUFLLENBQUE7SUFDTCw2Q0FBTyxDQUFBO0FBQ1IsQ0FBQyxFQUxXLFFBQVEsS0FBUixRQUFRLFFBS25CO0FBQ0QsSUFBSSxVQUFVLEdBQUcsS0FBSyxDQUFDO0FBQ3ZCLElBQUksWUFBWSxHQUFHLEtBQUssQ0FBQztBQUN6QixJQUFJLFFBQVEsR0FBRyxLQUFLLENBQUM7QUFDckIsSUFBSSxXQUFXLEdBQUcsS0FBSyxDQUFDO0FBQ3hCLElBQUksU0FBUyxHQUFHLEtBQUssQ0FBQztBQUN0QixJQUFJLE1BQU0sR0FBRyxLQUFLLENBQUM7QUFDbkIsSUFBSSxRQUFRLEdBQUcsS0FBSyxDQUFDO0FBQ3JCLE1BQU0sQ0FBQyxJQUFJLFNBQVMsR0FBYSxRQUFRLENBQUMsR0FBRyxDQUFDO0FBQzlDLGVBQWU7QUFDZixJQUFJLE9BQU8sT0FBTyxLQUFLLFFBQVEsRUFBRSxDQUFDO0lBQ2pDLFVBQVUsR0FBRyxDQUFDLE9BQU8sQ0FBQyxRQUFRLEtBQUssT0FBTyxDQUFDLENBQUM7SUFDNUMsWUFBWSxHQUFHLENBQUMsT0FBTyxDQUFDLFFBQVEsS0FBSyxRQUFRLENBQUMsQ0FBQztJQUMvQyxRQUFRLEdBQUcsQ0FBQyxPQUFPLENBQUMsUUFBUSxLQUFLLE9BQU8sQ0FBQyxDQUFDO0lBQzFDLFdBQVcsR0FBRyxDQUFDLFVBQVUsSUFBSSxDQUFDLE9BQU8sQ0FBQyxNQUFNLEVBQUUsS0FBSyxDQUFDLENBQUMsQ0FBQztJQUN0RCxTQUFTLEdBQUcsSUFBSSxDQUFDO0FBQ2xCLENBQUM7QUFDRCxJQUFJLFNBQVMsRUFBRSxDQUFDO0lBQ2YsSUFBSSxZQUFZLEVBQUUsQ0FBQztRQUNsQixTQUFTLEdBQUcsUUFBUSxDQUFDLEdBQUcsQ0FBQztJQUMxQixDQUFDO1NBQU0sSUFBSSxVQUFVLEVBQUUsQ0FBQztRQUN2QixTQUFTLEdBQUcsUUFBUSxDQUFDLE9BQU8sQ0FBQztJQUM5QixDQUFDO1NBQU0sSUFBSSxRQUFRLEVBQUUsQ0FBQztRQUNyQixTQUFTLEdBQUcsUUFBUSxDQUFDLEtBQUssQ0FBQztJQUM1QixDQUFDO0FBQ0YsQ0FBQztBQUVELE1BQU0sQ0FBQyxNQUFNLFNBQVMsR0FBRyxVQUFVLENBQUM7QUFDcEMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLFlBQVksQ0FBQztBQUN4QyxNQUFNLENBQUMsTUFBTSxPQUFPLEdBQUcsUUFBUSxDQUFDO0FBQ2hDLE1BQU0sQ0FBQyxNQUFNLFVBQVUsR0FBRyxXQUFXLENBQUM7QUFDdEMsTUFBTSxDQUFDLE1BQU0sUUFBUSxHQUFHLFNBQVMsQ0FBQztBQUNsQyxNQUFNLENBQUMsTUFBTSxLQUFLLEdBQUcsTUFBTSxDQUFDO0FBQzVCLE1BQU0sQ0FBQyxNQUFNLE9BQU8sR0FBRyxRQUFRLENBQUM7QUFDaEMsTUFBTSxDQUFDLE1BQU0sUUFBUSxHQUFHLFNBQVMsQ0FBQyJ9 ;// ../fs/dist/utils/strings.js let canNormalize = typeof (''.normalize) === 'function'; const nonAsciiCharactersPattern = /[^\u0000-\u0080]/; @@ -164593,7 +170179,7 @@ const normalizeNFC = (str) => { } return res; }; - +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3RyaW5ncy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy91dGlscy9zdHJpbmdzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE1BQU0sQ0FBQyxJQUFJLFlBQVksR0FBRyxPQUFPLENBQUUsRUFBVSxDQUFDLFNBQVMsQ0FBQyxLQUFLLFVBQVUsQ0FBQztBQUN4RSxNQUFNLHlCQUF5QixHQUFHLGtCQUFrQixDQUFDO0FBQ3JELE1BQU0sQ0FBQyxNQUFNLFlBQVksR0FBRyxDQUFDLEdBQVcsRUFBVSxFQUFFO0lBQ25ELElBQUksQ0FBQyxZQUFZLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUMzQixPQUFPLEdBQUcsQ0FBQztJQUNaLENBQUM7SUFFRCxJQUFJLEdBQVcsQ0FBQztJQUNoQixJQUFJLHlCQUF5QixDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDO1FBQ3pDLEdBQUcsR0FBSSxHQUFXLENBQUMsU0FBUyxDQUFDLEtBQUssQ0FBQyxDQUFDO0lBQ3JDLENBQUM7U0FBTSxDQUFDO1FBQ1AsR0FBRyxHQUFHLEdBQUcsQ0FBQztJQUNYLENBQUM7SUFDRCxPQUFPLEdBQUcsQ0FBQztBQUNaLENBQUMsQ0FBQyJ9 ;// ../fs/dist/list.js @@ -164657,7 +170243,7 @@ function list_async(path) { .catch(err => (err.code === 'ENOENT' ? resolve(undefined) : reject(err))); }); } - +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibGlzdC5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9saXN0LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sRUFBRSxXQUFXLEVBQUUsT0FBTyxFQUFFLE1BQU0sSUFBSSxDQUFDO0FBQzFDLE9BQU8sRUFBRSxnQkFBZ0IsRUFBRSxNQUFNLHFCQUFxQixDQUFDO0FBQ3ZELE9BQU8sRUFBRSxXQUFXLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQztBQUNsRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sb0JBQW9CLENBQUM7QUFFbEQsTUFBTSxVQUFVLGFBQWEsQ0FBQyxVQUFrQixFQUFFLElBQVk7SUFDN0QsTUFBTSxlQUFlLEdBQUcsVUFBVSxHQUFHLFFBQVEsQ0FBQztJQUM5QyxnQkFBZ0IsQ0FBQyxlQUFlLEVBQUUsTUFBTSxFQUFFLElBQUksRUFBRSxDQUFDLFFBQVEsRUFBRSxXQUFXLENBQUMsQ0FBQyxDQUFDO0FBQzFFLENBQUM7QUFFRCxNQUFNLFVBQVUsWUFBWSxDQUFDLElBQVk7SUFDeEMsc0RBQXNEO0lBQ3RELHNEQUFzRDtJQUN0RCxJQUFJLFdBQVcsRUFBRSxDQUFDO1FBQ2pCLE9BQU8sV0FBVyxDQUFDLElBQUksQ0FBQyxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLFlBQVksQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDO0lBQ3BELENBQUM7SUFFRCxPQUFPLFdBQVcsQ0FBQyxJQUFJLENBQUMsQ0FBQztBQUMxQixDQUFDO0FBQ0QsNERBQTREO0FBQzVELE9BQU87QUFDUCw0REFBNEQ7QUFDNUQsTUFBTSxVQUFVLElBQUksQ0FBQyxJQUFZO0lBQ2hDLElBQUksQ0FBQztRQUNKLE9BQU8sWUFBWSxDQUFDLElBQUksQ0FBQyxDQUFDO0lBQzNCLENBQUM7SUFBQyxPQUFPLEdBQUcsRUFBRSxDQUFDO1FBQ2QsSUFBSSxHQUFHLENBQUMsSUFBSSxLQUFLLFFBQVEsRUFBRSxDQUFDO1lBQzNCLHVEQUF1RDtZQUN2RCxPQUFPLFNBQVMsQ0FBQztRQUNsQixDQUFDO1FBQ0QsTUFBTSxHQUFHLENBQUM7SUFDWCxDQUFDO0FBQ0YsQ0FBQztBQUVELDREQUE0RDtBQUM1RCxRQUFRO0FBQ1IsNERBQTREO0FBQzVELFNBQVMsWUFBWSxDQUFDLElBQVk7SUFDakMsMEhBQTBIO0lBQzFILHNEQUFzRDtJQUN0RCxzREFBc0Q7SUFFdEQsT0FBTyxJQUFJLE9BQU8sQ0FBVyxDQUFDLE9BQU8sRUFBRSxNQUFNLEVBQUUsRUFBRTtRQUNoRCxJQUFJLFdBQVcsRUFBRSxDQUFDO1lBQ2pCLE9BQU8sQ0FBQyxJQUFJLEVBQUUsQ0FBQyxHQUEwQixFQUFFLEtBQWUsRUFBRSxFQUFFO2dCQUM3RCxJQUFJLEdBQUcsRUFBRSxDQUFDO29CQUNULE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQztnQkFDYixDQUFDO2dCQUNELE9BQU8sQ0FBQyxLQUFLLENBQUMsQ0FBQztZQUNoQixDQUFDLENBQUMsQ0FBQztRQUNKLENBQUM7UUFDRCxPQUFPLENBQUMsSUFBSSxFQUFFLENBQUMsR0FBMEIsRUFBRSxLQUFlLEVBQUUsRUFBRTtZQUM3RCxJQUFJLEdBQUcsRUFBRSxDQUFDO2dCQUNULE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQztZQUNiLENBQUM7WUFDRCxPQUFPLENBQUMsS0FBSyxDQUFDLENBQUM7UUFDaEIsQ0FBQyxDQUFDLENBQUM7SUFDSixDQUFDLENBQUMsQ0FBQztBQUNKLENBQUM7QUFDRCxNQUFNLFVBQVUsS0FBSyxDQUFDLElBQVk7SUFDakMsT0FBTyxJQUFJLE9BQU8sQ0FBVyxDQUFDLE9BQU8sRUFBRSxNQUFNLEVBQUUsRUFBRTtRQUNoRCxZQUFZLENBQUMsSUFBSSxDQUFDO2FBQ2hCLElBQUksQ0FBQyxDQUFDLElBQUksRUFBRSxFQUFFLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDO2FBQzdCLEtBQUssQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDLENBQUMsR0FBRyxDQUFDLElBQUksS0FBSyxRQUFRLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxTQUFTLENBQUMsQ0FBQyxDQUFDLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUMsQ0FBQztJQUM1RSxDQUFDLENBQUMsQ0FBQztBQUNKLENBQ
UMifQ== ;// ../fs/dist/errno.js const errno = [ { @@ -164965,7 +170551,7 @@ const code = {}; errno.forEach(function (error) { code[error.code] = error; }); - +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXJybm8uanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvZXJybm8udHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFDLE1BQU0sS0FBSyxHQUFHO0lBQ2pCO1FBQ0UsS0FBSyxFQUFFLENBQUMsQ0FBQztRQUNULElBQUksRUFBRSxRQUFRO1FBQ2QsV0FBVyxFQUFFLDJCQUEyQjtLQUN6QztJQUNEO1FBQ0UsS0FBSyxFQUFFLENBQUMsQ0FBQztRQUNULElBQUksRUFBRSxTQUFTO1FBQ2YsV0FBVyxFQUFFLGVBQWU7S0FDN0I7SUFDRDtRQUNFLEtBQUssRUFBRSxDQUFDO1FBQ1IsSUFBSSxFQUFFLElBQUk7UUFDVixXQUFXLEVBQUUsU0FBUztLQUN2QjtJQUNEO1FBQ0UsS0FBSyxFQUFFLENBQUM7UUFDUixJQUFJLEVBQUUsS0FBSztRQUNYLFdBQVcsRUFBRSxhQUFhO0tBQzNCO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsQ0FBQztRQUNSLElBQUksRUFBRSxXQUFXO1FBQ2pCLFdBQVcsRUFBRSxtQkFBbUI7S0FDakM7SUFDRDtRQUNFLEtBQUssRUFBRSxDQUFDO1FBQ1IsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUsbUJBQW1CO0tBQ2pDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsQ0FBQztRQUNSLElBQUksRUFBRSxRQUFRO1FBQ2QsV0FBVyxFQUFFLGtDQUFrQztLQUNoRDtJQUNEO1FBQ0UsS0FBSyxFQUFFLENBQUM7UUFDUixJQUFJLEVBQUUsWUFBWTtRQUNsQixXQUFXLEVBQUUsd0JBQXdCO0tBQ3RDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsQ0FBQztRQUNSLElBQUksRUFBRSxlQUFlO1FBQ3JCLFdBQVcsRUFBRSx1QkFBdUI7S0FDckM7SUFDRDtRQUNFLEtBQUssRUFBRSxDQUFDO1FBQ1IsSUFBSSxFQUFFLGNBQWM7UUFDcEIsV0FBVyxFQUFFLDhCQUE4QjtLQUM1QztJQUNEO1FBQ0UsS0FBSyxFQUFFLENBQUM7UUFDUixJQUFJLEVBQUUsVUFBVTtRQUNoQixXQUFXLEVBQUUsZ0NBQWdDO0tBQzlDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsQ0FBQztRQUNSLElBQUksRUFBRSxPQUFPO1FBQ2IsV0FBVyxFQUFFLHFCQUFxQjtLQUNuQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsT0FBTztRQUNiLFdBQVcsRUFBRSx5QkFBeUI7S0FDdkM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLGNBQWM7UUFDcEIsV0FBVyxFQUFFLGtDQUFrQztLQUNoRDtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsY0FBYztRQUNwQixXQUFXLEVBQUUsb0JBQW9CO0tBQ2xDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxZQUFZO1FBQ2xCLFdBQVcsRUFBRSwwQkFBMEI7S0FDeEM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLGNBQWM7UUFDcEIsV0FBVyxFQUFFLDhCQUE4QjtLQUM1QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSxxQ0FBcUM7S0FDbkQ7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLGNBQWM7UUFDcEIsV0FBVyxFQUFFLHFCQUFxQjtLQUNuQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsT0FBTztRQUNiLFdBQVcsRUFBRSx5QkFBeUI7S0FDdkM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUsa0JBQWtCO0tBQ2hDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxTQUFTO1FBQ2YsV0FBVyxFQUFFLDZCQUE2QjtLQUMzQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSxxQkFBcUI7S0FDbkM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFVBQVU7UUFDaEIsV0FBVyxFQUFFLGtCQUFrQjtLQUNoQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsVUFBVTtRQUNoQixXQUFXLEVBQUUsaUJBQWlCO0tBQy9CO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxhQUFhO1FBQ25CLFdBQVcsRUFBRSx3QkFBd0I7S0FDdEM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUscUJBQXFCO0tBQ25DO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxTQUFTO1FBQ2YsV0FBVyxFQUFFLDJCQUEyQjtLQUN6QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSxtQkFBbUI7S0FDakM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFNBQVM7UUFDZixXQUFXLEVBQUUsaUJBQWlCO0tBQy9CO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxRQUFRO1FBQ2QsV0FBVyxFQUFFLGtDQUFrQztLQUNoRDtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSwrQkFBK0I7S0FDN0M7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1
FBQ1QsSUFBSSxFQUFFLFVBQVU7UUFDaEIsV0FBVyxFQUFFLHlCQUF5QjtLQUN2QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsVUFBVTtRQUNoQixXQUFXLEVBQUUsZ0NBQWdDO0tBQzlDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxTQUFTO1FBQ2YsV0FBVyxFQUFFLG1DQUFtQztLQUNqRDtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSwyQkFBMkI7S0FDekM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUsMEJBQTBCO0tBQ3hDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxPQUFPO1FBQ2IsV0FBVyxFQUFFLGFBQWE7S0FDM0I7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUsZ0JBQWdCO0tBQzlCO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxpQkFBaUI7UUFDdkIsV0FBVyxFQUFFLHdCQUF3QjtLQUN0QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsWUFBWTtRQUNsQixXQUFXLEVBQUUsZ0NBQWdDO0tBQzlDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxXQUFXO1FBQ2pCLFdBQVcsRUFBRSxzQkFBc0I7S0FDcEM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFVBQVU7UUFDaEIsV0FBVyxFQUFFLDJCQUEyQjtLQUN6QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsaUJBQWlCO1FBQ3ZCLFdBQVcsRUFBRSwyQ0FBMkM7S0FDekQ7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFlBQVk7UUFDbEIsV0FBVyxFQUFFLHdDQUF3QztLQUN0RDtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsYUFBYTtRQUNuQixXQUFXLEVBQUUsMkJBQTJCO0tBQ3pDO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxXQUFXO1FBQ2pCLFdBQVcsRUFBRSwrQ0FBK0M7S0FDN0Q7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUscUJBQXFCO0tBQ25DO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxPQUFPO1FBQ2IsV0FBVyxFQUFFLGlCQUFpQjtLQUMvQjtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsY0FBYztRQUNwQixXQUFXLEVBQUUsZUFBZTtLQUM3QjtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsT0FBTztRQUNiLFdBQVcsRUFBRSx5QkFBeUI7S0FDdkM7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLE9BQU87UUFDYixXQUFXLEVBQUUscUNBQXFDO0tBQ25EO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxPQUFPO1FBQ2IsV0FBVyxFQUFFLGlDQUFpQztLQUMvQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsV0FBVztRQUNqQixXQUFXLEVBQUUscUJBQXFCO0tBQ25DO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxRQUFRO1FBQ2QsV0FBVyxFQUFFLHlCQUF5QjtLQUN2QztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsS0FBSztRQUNYLFdBQVcsRUFBRSxXQUFXO0tBQ3pCO0lBQ0Q7UUFDRSxLQUFLLEVBQUUsRUFBRTtRQUNULElBQUksRUFBRSxPQUFPO1FBQ2IsV0FBVyxFQUFFLHVCQUF1QjtLQUNyQztJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsUUFBUTtRQUNkLFdBQVcsRUFBRSxnQkFBZ0I7S0FDOUI7SUFDRDtRQUNFLEtBQUssRUFBRSxFQUFFO1FBQ1QsSUFBSSxFQUFFLFFBQVE7UUFDZCxXQUFXLEVBQUUsY0FBYztLQUM1QjtJQUNEO1FBQ0UsS0FBSyxFQUFFLEVBQUU7UUFDVCxJQUFJLEVBQUUsV0FBVztRQUNqQixXQUFXLEVBQUUsb0JBQW9CO0tBQ2xDO0NBQ0YsQ0FBQTtBQUlILE1BQU0sQ0FBQyxNQUFNLElBQUksR0FBRyxFQUFFLENBQUE7QUFDdEIsS0FBSyxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUs7SUFDM0IsSUFBSSxDQUFDLEtBQUssQ0FBQyxJQUFJLENBQUMsR0FBRyxLQUFLLENBQUE7QUFDMUIsQ0FBQyxDQUFDLENBQUEifQ== ;// ../fs/dist/errors.js @@ -165009,7 +170595,7 @@ const ErrIsNotDirectory = (path) => { err.code = 'ENOTDIR'; return err; }; - +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXJyb3JzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2Vycm9ycy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0saUJBQWlCLENBQUE7QUFDaEQsT0FBTyxFQUFFLElBQUksRUFBRyxNQUFNLFlBQVksQ0FBQTtBQUVsQyxNQUFNLENBQUMsTUFBTSxPQUFPLEdBQUcsRUFBRSxDQUFBO0FBRXpCLE1BQU0sQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLENBQUMsT0FBTyxDQUFDLFVBQVUsSUFBSTtJQUN2QyxNQUFNLENBQUMsR0FBRyxJQUFJLENBQUMsSUFBSSxDQUFDLENBQUM7SUFDckIsT0FBTyxDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsSUFBWSxFQUFFLEVBQUU7UUFDaEMsTUFBTSxHQUFHLEdBQUcsSUFBSSxLQUFLLENBQUMsSUFBSSxHQUFHLElBQUksR0FBRyxDQUFDLENBQUMsV0FBVyxHQUFHLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxLQUFLLEdBQUcsSUFBSSxHQUFHLElBQUksQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLENBQW1CLENBQUM7UUFDekcsR0FBRyxDQUFDLEtBQUssR0FBRyxDQUFDLENBQUMsS0FBSyxDQUFDO1FBQ3BCLEdBQUcsQ0FBQyxJQUFJLEdBQUcsSUFBSSxDQUFDO1FBQ2hCLEdBQUcsQ0FBQyxJQUFJLEdBQUcsSUFBSSxDQUFDO1FBQ2hCLE9BQU8sR0FBRyxDQUFDO0lBQ1osQ0FBQyxDQUFBO0FBQ0YsQ0FBQyxDQUFDLENBQUE7QUFDRixNQUFNLENBQUMsTUFBTSxjQUFjLEdBQUcsQ0FBQyxJQUFZLEVBQVMsRUFBRTtJQUNyRCxPQUFPLElBQUksS0FBSyxDQUFDLGdCQUFnQixHQUFHLElBQUksR0FBRyxxQ0FBcUMsQ0FBQyxDQUFDO0FBQ25GLENBQUMsQ0FBQztBQUNGLE1BQU0sQ0FBQyxNQUFNLGFBQWEsR0FBRyxDQUFDLElBQVksRUFBUyxFQUFFO0lBQ3BELE9BQU8sSUFBSSxLQUFLLENBQUMsZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLENBQUM7QUFDM0MsQ0FBQyxDQUFDO0FBQ0YsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLENBQUMsSUFBWSxFQUFTLEVBQUU7SUFDakQsT0FBTyxJQUFJLEtBQUssQ0FBQyxPQUFPLEdBQUcsSUFBSSxHQUFHLDRCQUE0QjtRQUM3RCxrREFBa0QsQ0FBQyxDQUFDO0FBQ3RELENBQUMsQ0FBQztBQUNGLE1BQU0sQ0FBQyxNQUFNLGNBQWMsR0FBRyxDQUFDLElBQVksRUFBUyxFQUFFO0lBQ3JELE9BQU8sSUFBSSxLQUFLLENBQUMsT0FBTyxHQUFHLElBQUksR0FBRyxpQ0FBaUM7UUFDbEUsaURBQWlELENBQUMsQ0FBQztBQUNyRCxDQUFDLENBQUM7QUFFRixNQUFNLENBQUMsTUFBTSxlQUFlLEdBQUcsQ0FBQyxJQUFZLEVBQVMsRUFBRTtJQUN0RCxNQUFNLEdBQUcsR0FBUSxJQUFJLEtBQUssQ0FBQyw4QkFBOEIsR0FBRyxJQUFJLENBQUMsQ0FBQztJQUNsRSxHQUFHLENBQUMsSUFBSSxHQUFHLFFBQVEsQ0FBQztJQUNwQixPQUFPLEdBQUcsQ0FBQztBQUNaLENBQUMsQ0FBQztBQUVGLE1BQU0sQ0FBQyxNQUFNLG9CQUFvQixHQUFHLENBQUMsSUFBWSxFQUFTLEVBQUU7SUFDM0QsTUFBTSxHQUFHLEdBQVEsSUFBSSxLQUFLLENBQUMsa0NBQWtDLEdBQUcsSUFBSSxDQUFDLENBQUM7SUFDdEUsR0FBRyxDQUFDLElBQUksR0FBRyxRQUFRLENBQUM7SUFDcEIsT0FBTyxHQUFHLENBQUM7QUFDWixDQUFDLENBQUM7QUFFRixNQUFNLENBQUMsTUFBTSxpQkFBaUIsR0FBRyxDQUFDLElBQVksRUFBUyxFQUFFO0lBQ3hELE1BQU0sR0FBRyxHQUFHLElBQUksY0FBYyxDQUFDLHFEQUFxRCxHQUFHLElBQUksQ0FBQyxDQUFDO0lBQzdGLEdBQUcsQ0FBQyxJQUFJLEdBQUcsU0FBUyxDQUFDO0lBQ3JCLE9BQU8sR0FBRyxDQUFDO0FBQ1osQ0FBQyxDQUFDIn0= // EXTERNAL MODULE: ../fs/node_modules/brace-expansion/index.js var fs_node_modules_brace_expansion = __webpack_require__(59144); ;// ../fs/node_modules/minimatch/dist/esm/assert-valid-pattern.js @@ -166869,7 +172455,7 @@ function matcher_create(basePath, patterns, options) { return weHaveMatch; }; } - +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWF0Y2hlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy91dGlscy9tYXRjaGVyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxTQUFTLE1BQU0sV0FBVyxDQUFBO0FBT3RDLE1BQU0scUJBQXFCLEdBQUcsQ0FBQyxRQUFnQixFQUFFLE9BQWUsRUFBVSxFQUFFO0lBQzNFLHNFQUFzRTtJQUN0RSxtREFBbUQ7SUFDbkQsTUFBTSxRQUFRLEdBQVksQ0FBQyxPQUFPLENBQUMsUUFBUSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7SUFDbEQsTUFBTSxVQUFVLEdBQVksT0FBTyxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsQ0FBQztJQUNsRCxNQUFNLFNBQVMsR0FBWSxJQUFJLENBQUMsSUFBSSxDQUFDLE9BQU8sQ0FBQyxDQUFDO0lBQzlDLElBQUksU0FBUyxDQUFDO0lBRWQsSUFBSSxDQUFDLFVBQVUsSUFBSSxRQUFRLEVBQUUsQ0FBQztRQUM3QixrRUFBa0U7UUFDbEUsT0FBTyxHQUFHLE9BQU8sQ0FBQyxPQUFPLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxPQUFPLEVBQUUsRUFBRSxDQUFDLENBQUM7UUFFekQsSUFBSSxLQUFLLENBQUMsSUFBSSxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUM7WUFDMUIsU0FBUyxHQUFHLEVBQUUsQ0FBQztRQUNoQixDQUFDO2FBQU0sQ0FBQztZQUNQLFNBQVMsR0FBRyxHQUFHLENBQUM7UUFDakIsQ0FBQztRQUVELElBQUksU0FBUyxFQUFFLENBQUM7WUFDZixPQUFPLEdBQUcsR0FBRyxRQUFRLEdBQUcsU0FBUyxHQUFHLE9BQU8sQ0FBQztRQUM3QyxDQUFDO1FBQ0QsT0FBTyxRQUFRLEdBQUcsU0FBUyxHQUFHLE9BQU8sQ0FBQztJQUN2QyxDQUFDO0lBRUQsT0FBTyxPQUFPLENBQUM7QUFDaEIsQ0FBQyxDQUFDO0FBRUYsTUFBTSxVQUFVLE1BQU0sQ0FBQyxRQUFnQixFQUFFLFFBQWtCLEVBQUUsT0FBa0I7SUFDOUUsSUFBSSxRQUFlLENBQUM7SUFDcEIsSUFBSSxPQUFPLFFBQVEsS0FBSyxRQUFRLEVBQUUsQ0FBQztRQUNsQyxRQUFRLEdBQUcsQ0FBQyxRQUFRLENBQUMsQ0FBQztJQUN2QixDQUFDO0lBQ0QsUUFBUSxHQUFHLFFBQVEsQ0FBQyxHQUFHLENBQUMsT0FBTyxDQUFDLEVBQUU7UUFDakMsT0FBTyxxQkFBcUIsQ0FBQyxRQUFRLEVBQUUsT0FBTyxDQUFDLENBQUM7SUFDakQsQ0FBQyxDQUFDLENBQUMsR0FBRyxDQUFDLE9BQU8sQ0FBQyxFQUFFO1FBQ2hCLE9BQU8sSUFBSSxTQUFTLENBQUMsU0FBUyxDQUFDLE9BQU8sRUFBRSxPQUFPLElBQUk7WUFDbEQsU0FBUyxFQUFFLElBQUk7WUFDZixTQUFTLEVBQUUsSUFBSTtZQUNmLEdBQUcsRUFBRSxJQUFJO1NBQ1QsQ0FBQyxDQUFDO0lBQ0osQ0FBQyxDQUFDLENBQUM7SUFFSCxPQUFPLFNBQVMsWUFBWSxDQUFDLFlBQW9CO1FBQ2hELElBQUksSUFBSSxHQUFHLFVBQVUsQ0FBQztRQUN0QixJQUFJLFdBQVcsR0FBRyxLQUFLLENBQUM7UUFDeEIsSUFBSSxjQUFjLENBQUM7UUFDbkIsSUFBSSxDQUFDLENBQUM7UUFFTixLQUFLLENBQUMsR0FBRyxDQUFDLEVBQUUsQ0FBQyxHQUFHLFFBQVEsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxJQUFJLENBQUMsRUFBRSxDQUFDO1lBQ3pDLGNBQWMsR0FBRyxRQUFRLENBQUMsQ0FBQyxDQUFDLENBQUM7WUFDN0IsSUFBSSxjQUFjLENBQUMsTUFBTSxFQUFFLENBQUM7Z0JBQzNCLElBQUksR0FBRyxVQUFVLENBQUM7Z0JBQ2xCLElBQUksQ0FBQyxLQUFLLENBQUMsRUFBRSxDQUFDO29CQUNiLDhDQUE4QztvQkFDOUMsNkNBQTZDO29CQUM3Qyx5QkFBeUI7b0JBQ3pCLFdBQVcsR0FBRyxJQUFJLENBQUM7Z0JBQ3BCLENBQUM7WUFDRixDQUFDO1lBRUQsSUFBSSxJQUFJLEtBQUssVUFBVSxJQUFJLFdBQVcsSUFBSSxDQUFDLGNBQWMsQ0FBQyxLQUFLLENBQUMsWUFBWSxDQUFDLEVBQUUsQ0FBQztnQkFDL0UsZ0VBQWdFO2dCQUNoRSxPQUFPLEtBQUssQ0FBQztZQUNkLENBQUM7WUFFRCxJQUFJLElBQUksS0FBSyxVQUFVLElBQUksQ0FBQyxXQUFXLEVBQUUsQ0FBQztnQkFDekMsV0FBVyxHQUFHLGNBQWMsQ0FBQyxLQUFLLENBQUMsWUFBWSxDQUFDLENBQUM7WUFDbEQsQ0FBQztRQUNGLENBQUM7UUFFRCxPQUFPLFdBQVcsQ0FBQztJQUNwQixDQUFDLENBQUM7QUFDSCxDQUFDIn0= ;// ../fs/dist/utils/tree_walker.js @@ -166971,7 +172557,7 @@ function tree_walker_stream(path, options) { }; return rs; } - +//# 
sourceMappingURL=data:application/json;base64,… (inline base64 source map for tree_walker.js omitted)
 ;// ../core/dist/iterator.js
 /*---------------------------------------------------------------------------------------------
  *  Copyright (c) Microsoft Corporation. All rights reserved.
@@ -167270,7 +172856,7 @@ function iterator_sync(from, options) {
     }, collectorSync);
     return new ArrayIterator(nodes);
 }
-
+//# sourceMappingURL=data:application/json;base64,… (inline base64 source map for iterator.js omitted)
 ;// ../fs/dist/remove.js
@@ -167602,7 +173188,7 @@ async function remove_async(path, options) {
         });
     }
 }
-
+//# sourceMappingURL=data:application/json;base64,… (inline source map for remove.js omitted)
 ;// ../fs/dist/utils/mode.js
 // Converts mode to string 3 characters long.
 const normalizeFileMode = (mode) => {
@@ -167615,7 +173201,7 @@ const normalizeFileMode = (mode) => {
     }
     return modeAsString.substring(modeAsString.length - 3);
 };
-
+//# sourceMappingURL=data:application/json;base64,… (inline base64 source map for mode.js omitted)
 ;// ../fs/dist/dir.js
@@ -167802,7 +173388,7 @@ const dir_async = (path, passedCriteria) => {
         .then(resolve, reject);
     });
 };
-
+//# sourceMappingURL=data:application/json;base64,… (inline source map for dir.js omitted)
 ;// 
./node_modules/openai/internal/qs/formats.mjs const default_format = 'RFC3986'; const formatters = { @@ -168307,3952 +173893,6 @@ function stringify(object, opts = {}) { return joined.length > 0 ? prefix + joined : ''; } //# sourceMappingURL=stringify.mjs.map -;// ./node_modules/openai/version.mjs -const VERSION = '4.87.4'; // x-release-please-version -//# sourceMappingURL=version.mjs.map -;// ./node_modules/openai/_shims/registry.mjs -let auto = false; -let kind = undefined; -let registry_fetch = undefined; -let registry_Request = (/* unused pure expression or super */ null && (undefined)); -let registry_Response = (/* unused pure expression or super */ null && (undefined)); -let Headers = (/* unused pure expression or super */ null && (undefined)); -let registry_FormData = undefined; -let registry_Blob = (/* unused pure expression or super */ null && (undefined)); -let File = undefined; -let registry_ReadableStream = undefined; -let registry_getMultipartRequestOptions = undefined; -let getDefaultAgent = undefined; -let fileFromPath = undefined; -let isFsReadStream = undefined; -function setShims(shims, options = { auto: false }) { - if (auto) { - throw new Error(`you must \`import 'openai/shims/${shims.kind}'\` before importing anything else from openai`); - } - if (kind) { - throw new Error(`can't \`import 'openai/shims/${shims.kind}'\` after \`import 'openai/shims/${kind}'\``); - } - auto = options.auto; - kind = shims.kind; - registry_fetch = shims.fetch; - registry_Request = shims.Request; - registry_Response = shims.Response; - Headers = shims.Headers; - registry_FormData = shims.FormData; - registry_Blob = shims.Blob; - File = shims.File; - registry_ReadableStream = shims.ReadableStream; - registry_getMultipartRequestOptions = shims.getMultipartRequestOptions; - getDefaultAgent = shims.getDefaultAgent; - fileFromPath = shims.fileFromPath; - isFsReadStream = shims.isFsReadStream; -} -//# sourceMappingURL=registry.mjs.map -// EXTERNAL MODULE: external "stream" -var external_stream_ = __webpack_require__(2203); -// EXTERNAL MODULE: external "http" -var external_http_ = __webpack_require__(58611); -// EXTERNAL MODULE: ./node_modules/whatwg-url/lib/public-api.js -var public_api = __webpack_require__(53417); -// EXTERNAL MODULE: external "https" -var external_https_ = __webpack_require__(65692); -// EXTERNAL MODULE: external "zlib" -var external_zlib_ = __webpack_require__(43106); -;// ./node_modules/node-fetch/lib/index.mjs - - - - - - - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const lib_Readable = external_stream_.Readable; - -const lib_BUFFER = Symbol('buffer'); -const lib_TYPE = Symbol('type'); - -class lib_Blob { - constructor() { - this[lib_TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof lib_Blob) { - buffer = element[lib_BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[lib_BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[lib_TYPE] = type; - } - } - get size() { - return this[lib_BUFFER].length; - } - get type() { - return this[lib_TYPE]; - } - text() { - return Promise.resolve(this[lib_BUFFER].toString()); - } - arrayBuffer() { - const buf = this[lib_BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new lib_Readable(); - readable._read = function () {}; - readable.push(this[lib_BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[lib_BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new lib_Blob([], { type: arguments[2] }); - blob[lib_BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(lib_Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(lib_Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = require('encoding').convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = external_stream_.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof external_stream_) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof external_stream_) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new lib_Blob([], { - type: ct.toLowerCase() - }), { - [lib_BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof external_stream_)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof lib_Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -lib_Headers.prototype.entries = lib_Headers.prototype[Symbol.iterator]; - -Object.defineProperty(lib_Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(lib_Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new lib_Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = external_http_.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class lib_Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new lib_Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new lib_Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(lib_Response.prototype); - -Object.defineProperties(lib_Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(lib_Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); -const lib_URL = external_url_.URL || public_api.URL; - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = external_url_.parse; -const format_url = external_url_.format; - -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new lib_URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} - -const streamDestructionSupported = "destroy" in external_stream_.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class lib_Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new lib_Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new lib_Request(this); - } -} - -Body.mixIn(lib_Request.prototype); - -Object.defineProperty(lib_Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(lib_Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new lib_Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof external_stream_.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = external_url_.URL || public_api.URL; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = external_stream_.PassThrough; - -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; - - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; - -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. 
- * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; - - return orig === dest; -}; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function lib_fetch(url, opts) { - - // allow custom promise - if (!lib_fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = lib_fetch.Promise; - - // wrap http.request into fetch - return new lib_fetch.Promise(function (resolve, reject) { - // build request object - const request = new lib_Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? external_https_ : external_http_).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof external_stream_.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - - if (response && response.body) { - destroyStream(response.body, err); - } - - finalize(); - }); - - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } - - if (response && response.body) { - destroyStream(response.body, err); - } - }); - - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. - req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; - - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (lib_fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new lib_Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(lib_fetch(new lib_Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. 
content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new lib_Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: external_zlib_.Z_SYNC_FLUSH, - finishFlush: external_zlib_.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(external_zlib_.createGunzip(zlibOptions)); - response = new lib_Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(external_zlib_.createInflate()); - } else { - body = body.pipe(external_zlib_.createInflateRaw()); - } - response = new lib_Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. - if (!response) { - response = new lib_Response(body, response_options); - resolve(response); - } - }); - return; - } - - // for br - if (codings == 'br' && typeof external_zlib_.createBrotliDecompress === 'function') { - body = body.pipe(external_zlib_.createBrotliDecompress()); - response = new lib_Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new lib_Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; - - request.on('socket', function (s) { - socket = s; - }); - - request.on('response', function (response) { - const headers = response.headers; - - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // tests for socket presence, as in some situations the - // the 'socket' event is not triggered for the request - // (happens in deno), avoids `TypeError` - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket && socket.listenerCount('data') > 0; - - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); -} - -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } -} - -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -lib_fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -lib_fetch.Promise = global.Promise; - -/* harmony default export */ const node_fetch_lib = (lib_fetch); - - -// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/File.js -var esm_File = __webpack_require__(71831); -// EXTERNAL MODULE: 
./node_modules/formdata-node/lib/esm/isFile.js -var esm_isFile = __webpack_require__(80699); -// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/Blob.js + 2 modules -var esm_Blob = __webpack_require__(89239); -;// ./node_modules/formdata-node/lib/esm/isBlob.js - -const isBlob_isBlob = (value) => value instanceof esm_Blob/* Blob */.Y; - -// EXTERNAL MODULE: ./node_modules/formdata-node/lib/esm/isFunction.js -var esm_isFunction = __webpack_require__(52937); -;// ./node_modules/formdata-node/lib/esm/deprecateConstructorEntries.js - -const deprecateConstructorEntries = (0,external_util_.deprecate)(() => { }, "Constructor \"entries\" argument is not spec-compliant " - + "and will be removed in next major release."); - -;// ./node_modules/formdata-node/lib/esm/FormData.js -var FormData_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _FormData_instances, _FormData_entries, _FormData_setEntry; - - - - - - -class FormData_FormData { - constructor(entries) { - _FormData_instances.add(this); - _FormData_entries.set(this, new Map()); - if (entries) { - deprecateConstructorEntries(); - entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName)); - } - } - static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) { - return Boolean(value - && (0,esm_isFunction/* isFunction */.T)(value.constructor) - && value[Symbol.toStringTag] === "FormData" - && (0,esm_isFunction/* isFunction */.T)(value.append) - && (0,esm_isFunction/* isFunction */.T)(value.set) - && (0,esm_isFunction/* isFunction */.T)(value.get) - && (0,esm_isFunction/* isFunction */.T)(value.getAll) - && (0,esm_isFunction/* isFunction */.T)(value.has) - && (0,esm_isFunction/* isFunction */.T)(value.delete) - && (0,esm_isFunction/* isFunction */.T)(value.entries) - && (0,esm_isFunction/* isFunction */.T)(value.values) - && (0,esm_isFunction/* isFunction */.T)(value.keys) - && (0,esm_isFunction/* isFunction */.T)(value[Symbol.iterator]) - && (0,esm_isFunction/* isFunction */.T)(value.forEach)); - } - append(name, value, fileName) { - FormData_classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { - name, - fileName, - append: true, - rawValue: value, - argsLength: arguments.length - }); - } - set(name, value, fileName) { - FormData_classPrivateFieldGet(this, _FormData_instances, "m", _FormData_setEntry).call(this, { - name, - fileName, - append: false, - rawValue: value, - argsLength: arguments.length - }); - } - get(name) { - const field = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); - if (!field) { - return null; - } - return field[0]; - } - getAll(name) { - const field = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(String(name)); - if (!field) { - return []; - } - return field.slice(); - } - has(name) { - return FormData_classPrivateFieldGet(this, _FormData_entries, "f").has(String(name)); - } - delete(name) { - FormData_classPrivateFieldGet(this, _FormData_entries, "f").delete(String(name)); - } - *keys() { - for (const key of 
FormData_classPrivateFieldGet(this, _FormData_entries, "f").keys()) { - yield key; - } - } - *entries() { - for (const name of this.keys()) { - const values = this.getAll(name); - for (const value of values) { - yield [name, value]; - } - } - } - *values() { - for (const [, value] of this) { - yield value; - } - } - [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) { - const methodName = append ? "append" : "set"; - if (argsLength < 2) { - throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` - + `2 arguments required, but only ${argsLength} present.`); - } - name = String(name); - let value; - if ((0,esm_isFile/* isFile */.f)(rawValue)) { - value = fileName === undefined - ? rawValue - : new esm_File/* File */.Z([rawValue], fileName, { - type: rawValue.type, - lastModified: rawValue.lastModified - }); - } - else if (isBlob_isBlob(rawValue)) { - value = new esm_File/* File */.Z([rawValue], fileName === undefined ? "blob" : fileName, { - type: rawValue.type - }); - } - else if (fileName) { - throw new TypeError(`Failed to execute '${methodName}' on 'FormData': ` - + "parameter 2 is not of type 'Blob'."); - } - else { - value = String(rawValue); - } - const values = FormData_classPrivateFieldGet(this, _FormData_entries, "f").get(name); - if (!values) { - return void FormData_classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); - } - if (!append) { - return void FormData_classPrivateFieldGet(this, _FormData_entries, "f").set(name, [value]); - } - values.push(value); - }, Symbol.iterator)]() { - return this.entries(); - } - forEach(callback, thisArg) { - for (const [name, value] of this) { - callback.call(thisArg, value, name, this); - } - } - get [Symbol.toStringTag]() { - return "FormData"; - } - [external_util_.inspect.custom]() { - return this[Symbol.toStringTag]; - } -} - -;// ./node_modules/formdata-node/lib/esm/index.js - - - - -// EXTERNAL MODULE: ./node_modules/agentkeepalive/index.js -var agentkeepalive = __webpack_require__(95692); -// EXTERNAL MODULE: ./node_modules/abort-controller/dist/abort-controller.js -var abort_controller = __webpack_require__(66584); -;// ./node_modules/form-data-encoder/lib/esm/util/createBoundary.js -const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"; -function createBoundary() { - let size = 16; - let res = ""; - while (size--) { - res += alphabet[(Math.random() * alphabet.length) << 0]; - } - return res; -} -/* harmony default export */ const util_createBoundary = (createBoundary); - -;// ./node_modules/form-data-encoder/lib/esm/util/isPlainObject.js -const isPlainObject_getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); -function isPlainObject(value) { - if (isPlainObject_getType(value) !== "object") { - return false; - } - const pp = Object.getPrototypeOf(value); - if (pp === null || pp === undefined) { - return true; - } - const Ctor = pp.constructor && pp.constructor.toString(); - return Ctor === Object.toString(); -} -/* harmony default export */ const util_isPlainObject = (isPlainObject); - -;// ./node_modules/form-data-encoder/lib/esm/util/normalizeValue.js -const normalizeValue = (value) => String(value) - .replace(/\r|\n/g, (match, i, str) => { - if ((match === "\r" && str[i + 1] !== "\n") - || (match === "\n" && str[i - 1] !== "\r")) { - return "\r\n"; - } - return match; -}); -/* harmony default export */ const util_normalizeValue = (normalizeValue); - -;// 
./node_modules/form-data-encoder/lib/esm/util/escapeName.js -const escapeName = (name) => String(name) - .replace(/\r/g, "%0D") - .replace(/\n/g, "%0A") - .replace(/"/g, "%22"); -/* harmony default export */ const util_escapeName = (escapeName); - -;// ./node_modules/form-data-encoder/lib/esm/util/isFunction.js -const isFunction_isFunction = (value) => (typeof value === "function"); -/* harmony default export */ const util_isFunction = (isFunction_isFunction); - -;// ./node_modules/form-data-encoder/lib/esm/util/isFileLike.js - -const isFileLike = (value) => Boolean(value - && typeof value === "object" - && util_isFunction(value.constructor) - && value[Symbol.toStringTag] === "File" - && util_isFunction(value.stream) - && value.name != null - && value.size != null - && value.lastModified != null); - -;// ./node_modules/form-data-encoder/lib/esm/util/isFormData.js - -const isFormData = (value) => Boolean(value - && util_isFunction(value.constructor) - && value[Symbol.toStringTag] === "FormData" - && util_isFunction(value.append) - && util_isFunction(value.getAll) - && util_isFunction(value.entries) - && util_isFunction(value[Symbol.iterator])); -const isFormDataLike = (/* unused pure expression or super */ null && (isFormData)); - -;// ./node_modules/form-data-encoder/lib/esm/FormDataEncoder.js -var FormDataEncoder_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var FormDataEncoder_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader; - - - - - - -const FormDataEncoder_defaultOptions = { - enableAdditionalHeaders: false -}; -class FormDataEncoder { - constructor(form, boundaryOrOptions, options) { - _FormDataEncoder_instances.add(this); - _FormDataEncoder_CRLF.set(this, "\r\n"); - _FormDataEncoder_CRLF_BYTES.set(this, void 0); - _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0); - _FormDataEncoder_DASHES.set(this, "-".repeat(2)); - _FormDataEncoder_encoder.set(this, new TextEncoder()); - _FormDataEncoder_footer.set(this, void 0); - _FormDataEncoder_form.set(this, void 0); - _FormDataEncoder_options.set(this, void 0); - if (!isFormData(form)) { - throw new TypeError("Expected first argument to be a FormData instance."); - } - let boundary; - if (util_isPlainObject(boundaryOrOptions)) { - options = boundaryOrOptions; - } - else { - boundary = boundaryOrOptions; - } - if (!boundary) { - boundary = util_createBoundary(); - } - if (typeof boundary !== "string") { - throw new TypeError("Expected boundary argument to be a string."); - } - if (options && !util_isPlainObject(options)) { - throw new TypeError("Expected options argument to be an object."); - } - FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_form, form, "f"); - FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_options, { ...FormDataEncoder_defaultOptions, ...options }, "f"); - FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f"); - FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f"); - this.boundary = `form-data-boundary-${boundary}`; - this.contentType = `multipart/form-data; boundary=${this.boundary}`; - FormDataEncoder_classPrivateFieldSet(this, _FormDataEncoder_footer, FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f"); - this.contentLength = String(this.getContentLength()); - this.headers = Object.freeze({ - "Content-Type": this.contentType, - "Content-Length": this.contentLength - }); - Object.defineProperties(this, { - boundary: { writable: false, configurable: false }, - contentType: { writable: false, configurable: false }, - contentLength: { writable: false, configurable: false }, - headers: { writable: false, configurable: false } - }); - } - getContentLength() { - let length = 0; - for (const [name, raw] of FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { - const value = isFileLike(raw) ? raw : FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(util_normalizeValue(raw)); - length += FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength; - length += isFileLike(value) ? 
value.size : value.byteLength; - length += FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f"); - } - return length + FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength; - } - *values() { - for (const [name, raw] of FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_form, "f").entries()) { - const value = isFileLike(raw) ? raw : FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(util_normalizeValue(raw)); - yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value); - yield value; - yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f"); - } - yield FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_footer, "f"); - } - async *encode() { - for (const part of this.values()) { - if (isFileLike(part)) { - yield* part.stream(); - } - else { - yield part; - } - } - } - [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) { - let header = ""; - header += `${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; - header += `Content-Disposition: form-data; name="${util_escapeName(name)}"`; - if (isFileLike(value)) { - header += `; filename="${util_escapeName(value.name)}"${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; - header += `Content-Type: ${value.type || "application/octet-stream"}`; - } - if (FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true) { - header += `${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${isFileLike(value) ? value.size : value.byteLength}`; - } - return FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${FormDataEncoder_classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`); - }, Symbol.iterator)]() { - return this.values(); - } - [Symbol.asyncIterator]() { - return this.encode(); - } -} -const Encoder = (/* unused pure expression or super */ null && (FormDataEncoder)); - -;// ./node_modules/form-data-encoder/lib/esm/index.js - - - - - - -;// ./node_modules/openai/_shims/MultipartBody.mjs -/** - * Disclaimer: modules in _shims aren't intended to be imported by SDK users. 
- */ -class MultipartBody { - constructor(body) { - this.body = body; - } - get [Symbol.toStringTag]() { - return 'MultipartBody'; - } -} -//# sourceMappingURL=MultipartBody.mjs.map -;// external "node:stream/web" -const web_namespaceObject = require("node:stream/web"); -;// ./node_modules/openai/_shims/node-runtime.mjs - - - - - - - - - -let fileFromPathWarned = false; -async function node_runtime_fileFromPath(path, ...args) { - // this import fails in environments that don't handle export maps correctly, like old versions of Jest - const { fileFromPath: _fileFromPath } = await __webpack_require__.e(/* import() */ 401).then(__webpack_require__.bind(__webpack_require__, 26401)); - if (!fileFromPathWarned) { - console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`); - fileFromPathWarned = true; - } - // @ts-ignore - return await _fileFromPath(path, ...args); -} -const defaultHttpAgent = new agentkeepalive({ keepAlive: true, timeout: 5 * 60 * 1000 }); -const defaultHttpsAgent = new agentkeepalive.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 }); -async function node_runtime_getMultipartRequestOptions(form, opts) { - const encoder = new FormDataEncoder(form); - const readable = external_node_stream_namespaceObject.Readable.from(encoder); - const body = new MultipartBody(readable); - const headers = { - ...opts.headers, - ...encoder.headers, - 'Content-Length': encoder.contentLength, - }; - return { ...opts, body: body, headers }; -} -function getRuntime() { - // Polyfill global object if needed. - if (typeof AbortController === 'undefined') { - // @ts-expect-error (the types are subtly different, but compatible in practice) - globalThis.AbortController = abort_controller.AbortController; - } - return { - kind: 'node', - fetch: node_fetch_lib, - Request: lib_Request, - Response: lib_Response, - Headers: lib_Headers, - FormData: FormData_FormData, - Blob: esm_Blob/* Blob */.Y, - File: esm_File/* File */.Z, - ReadableStream: web_namespaceObject.ReadableStream, - getMultipartRequestOptions: node_runtime_getMultipartRequestOptions, - getDefaultAgent: (url) => (url.startsWith('https') ? defaultHttpsAgent : defaultHttpAgent), - fileFromPath: node_runtime_fileFromPath, - isFsReadStream: (value) => value instanceof external_node_fs_namespaceObject.ReadStream, - }; -} -//# sourceMappingURL=node-runtime.mjs.map -;// ./node_modules/openai/_shims/index.mjs -/** - * Disclaimer: modules in _shims aren't intended to be imported by SDK users. - */ - - -if (!kind) setShims(getRuntime(), { auto: true }); - - -;// ./node_modules/openai/error.mjs -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -class error_OpenAIError extends Error { -} -class APIError extends error_OpenAIError { - constructor(status, error, message, headers) { - super(`${APIError.makeMessage(status, error, message)}`); - this.status = status; - this.headers = headers; - this.request_id = headers?.['x-request-id']; - this.error = error; - const data = error; - this.code = data?.['code']; - this.param = data?.['param']; - this.type = data?.['type']; - } - static makeMessage(status, error, message) { - const msg = error?.message ? - typeof error.message === 'string' ? - error.message - : JSON.stringify(error.message) - : error ? 
JSON.stringify(error) - : message; - if (status && msg) { - return `${status} ${msg}`; - } - if (status) { - return `${status} status code (no body)`; - } - if (msg) { - return msg; - } - return '(no status code or body)'; - } - static generate(status, errorResponse, message, headers) { - if (!status || !headers) { - return new APIConnectionError({ message, cause: castToError(errorResponse) }); - } - const error = errorResponse?.['error']; - if (status === 400) { - return new BadRequestError(status, error, message, headers); - } - if (status === 401) { - return new AuthenticationError(status, error, message, headers); - } - if (status === 403) { - return new PermissionDeniedError(status, error, message, headers); - } - if (status === 404) { - return new NotFoundError(status, error, message, headers); - } - if (status === 409) { - return new ConflictError(status, error, message, headers); - } - if (status === 422) { - return new UnprocessableEntityError(status, error, message, headers); - } - if (status === 429) { - return new RateLimitError(status, error, message, headers); - } - if (status >= 500) { - return new InternalServerError(status, error, message, headers); - } - return new APIError(status, error, message, headers); - } -} -class APIUserAbortError extends APIError { - constructor({ message } = {}) { - super(undefined, undefined, message || 'Request was aborted.', undefined); - } -} -class APIConnectionError extends APIError { - constructor({ message, cause }) { - super(undefined, undefined, message || 'Connection error.', undefined); - // in some environments the 'cause' property is already declared - // @ts-ignore - if (cause) - this.cause = cause; - } -} -class APIConnectionTimeoutError extends APIConnectionError { - constructor({ message } = {}) { - super({ message: message ?? 'Request timed out.' }); - } -} -class BadRequestError extends APIError { -} -class AuthenticationError extends APIError { -} -class PermissionDeniedError extends APIError { -} -class NotFoundError extends APIError { -} -class ConflictError extends APIError { -} -class UnprocessableEntityError extends APIError { -} -class RateLimitError extends APIError { -} -class InternalServerError extends APIError { -} -class LengthFinishReasonError extends error_OpenAIError { - constructor() { - super(`Could not parse response content as the length limit was reached`); - } -} -class ContentFilterFinishReasonError extends error_OpenAIError { - constructor() { - super(`Could not parse response content as the request was rejected by the content filter`); - } -} -//# sourceMappingURL=error.mjs.map -;// ./node_modules/openai/internal/decoders/line.mjs -var line_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var line_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _LineDecoder_carriageReturnIndex; - -/** - * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally - * reading lines from text. - * - * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 - */ -class LineDecoder { - constructor() { - _LineDecoder_carriageReturnIndex.set(this, void 0); - this.buffer = new Uint8Array(); - line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); - } - decode(chunk) { - if (chunk == null) { - return []; - } - const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) - : typeof chunk === 'string' ? new TextEncoder().encode(chunk) - : chunk; - let newData = new Uint8Array(this.buffer.length + binaryChunk.length); - newData.set(this.buffer); - newData.set(binaryChunk, this.buffer.length); - this.buffer = newData; - const lines = []; - let patternIndex; - while ((patternIndex = findNewlineIndex(this.buffer, line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) { - if (patternIndex.carriage && line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) { - // skip until we either get a corresponding `\n`, a new `\r` or nothing - line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f"); - continue; - } - // we got double \r or \rtext\n - if (line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null && - (patternIndex.index !== line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) { - lines.push(this.decodeText(this.buffer.slice(0, line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1))); - this.buffer = this.buffer.slice(line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")); - line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); - continue; - } - const endIndex = line_classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding; - const line = this.decodeText(this.buffer.slice(0, endIndex)); - lines.push(line); - this.buffer = this.buffer.slice(patternIndex.index); - line_classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f"); - } - return lines; - } - decodeText(bytes) { - if (bytes == null) - return ''; - if (typeof bytes === 'string') - return bytes; - // Node: - if (typeof Buffer !== 'undefined') { - if (bytes instanceof Buffer) { - return bytes.toString(); - } - if (bytes instanceof Uint8Array) { - return Buffer.from(bytes).toString(); - } - throw new error_OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); - } - // Browser - if (typeof TextDecoder !== 'undefined') { - if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { - this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8')); - return this.textDecoder.decode(bytes); - } - throw new error_OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. 
Please report this error.`); - } - throw new error_OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`); - } - flush() { - if (!this.buffer.length) { - return []; - } - return this.decode('\n'); - } -} -_LineDecoder_carriageReturnIndex = new WeakMap(); -// prettier-ignore -LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']); -LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; -/** - * This function searches the buffer for the end patterns, (\r or \n) - * and returns an object with the index preceding the matched newline and the - * index after the newline char. `null` is returned if no new line is found. - * - * ```ts - * findNewLineIndex('abc\ndef') -> { preceding: 2, index: 3 } - * ``` - */ -function findNewlineIndex(buffer, startIndex) { - const newline = 0x0a; // \n - const carriage = 0x0d; // \r - for (let i = startIndex ?? 0; i < buffer.length; i++) { - if (buffer[i] === newline) { - return { preceding: i, index: i + 1, carriage: false }; - } - if (buffer[i] === carriage) { - return { preceding: i, index: i + 1, carriage: true }; - } - } - return null; -} -function findDoubleNewlineIndex(buffer) { - // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n) - // and returns the index right after the first occurrence of any pattern, - // or -1 if none of the patterns are found. - const newline = 0x0a; // \n - const carriage = 0x0d; // \r - for (let i = 0; i < buffer.length - 1; i++) { - if (buffer[i] === newline && buffer[i + 1] === newline) { - // \n\n - return i + 2; - } - if (buffer[i] === carriage && buffer[i + 1] === carriage) { - // \r\r - return i + 2; - } - if (buffer[i] === carriage && - buffer[i + 1] === newline && - i + 3 < buffer.length && - buffer[i + 2] === carriage && - buffer[i + 3] === newline) { - // \r\n\r\n - return i + 4; - } - } - return -1; -} -//# sourceMappingURL=line.mjs.map -;// ./node_modules/openai/internal/stream-utils.mjs -/** - * Most browsers don't yet have async iterable support for ReadableStream, - * and Node has a very different way of reading bytes from its "ReadableStream". 
- * - * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 - */ -function ReadableStreamToAsyncIterable(stream) { - if (stream[Symbol.asyncIterator]) - return stream; - const reader = stream.getReader(); - return { - async next() { - try { - const result = await reader.read(); - if (result?.done) - reader.releaseLock(); // release lock when stream becomes closed - return result; - } - catch (e) { - reader.releaseLock(); // release lock when stream becomes errored - throw e; - } - }, - async return() { - const cancelPromise = reader.cancel(); - reader.releaseLock(); - await cancelPromise; - return { done: true, value: undefined }; - }, - [Symbol.asyncIterator]() { - return this; - }, - }; -} -//# sourceMappingURL=stream-utils.mjs.map -;// ./node_modules/openai/streaming.mjs - - - - - -class Stream { - constructor(iterator, controller) { - this.iterator = iterator; - this.controller = controller; - } - static fromSSEResponse(response, controller) { - let consumed = false; - async function* iterator() { - if (consumed) { - throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); - } - consumed = true; - let done = false; - try { - for await (const sse of _iterSSEMessages(response, controller)) { - if (done) - continue; - if (sse.data.startsWith('[DONE]')) { - done = true; - continue; - } - if (sse.event === null || sse.event.startsWith('response.')) { - let data; - try { - data = JSON.parse(sse.data); - } - catch (e) { - console.error(`Could not parse message into JSON:`, sse.data); - console.error(`From chunk:`, sse.raw); - throw e; - } - if (data && data.error) { - throw new APIError(undefined, data.error, undefined, undefined); - } - yield data; - } - else { - let data; - try { - data = JSON.parse(sse.data); - } - catch (e) { - console.error(`Could not parse message into JSON:`, sse.data); - console.error(`From chunk:`, sse.raw); - throw e; - } - // TODO: Is this where the error should be thrown? - if (sse.event == 'error') { - throw new APIError(undefined, data.error, data.message, undefined); - } - yield { event: sse.event, data: data }; - } - } - done = true; - } - catch (e) { - // If the user calls `stream.controller.abort()`, we should exit without throwing. - if (e instanceof Error && e.name === 'AbortError') - return; - throw e; - } - finally { - // If the user `break`s, abort the ongoing request. - if (!done) - controller.abort(); - } - } - return new Stream(iterator, controller); - } - /** - * Generates a Stream from a newline-separated ReadableStream - * where each item is a JSON value. - */ - static fromReadableStream(readableStream, controller) { - let consumed = false; - async function* iterLines() { - const lineDecoder = new LineDecoder(); - const iter = ReadableStreamToAsyncIterable(readableStream); - for await (const chunk of iter) { - for (const line of lineDecoder.decode(chunk)) { - yield line; - } - } - for (const line of lineDecoder.flush()) { - yield line; - } - } - async function* iterator() { - if (consumed) { - throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.'); - } - consumed = true; - let done = false; - try { - for await (const line of iterLines()) { - if (done) - continue; - if (line) - yield JSON.parse(line); - } - done = true; - } - catch (e) { - // If the user calls `stream.controller.abort()`, we should exit without throwing. 
- if (e instanceof Error && e.name === 'AbortError') - return; - throw e; - } - finally { - // If the user `break`s, abort the ongoing request. - if (!done) - controller.abort(); - } - } - return new Stream(iterator, controller); - } - [Symbol.asyncIterator]() { - return this.iterator(); - } - /** - * Splits the stream into two streams which can be - * independently read from at different speeds. - */ - tee() { - const left = []; - const right = []; - const iterator = this.iterator(); - const teeIterator = (queue) => { - return { - next: () => { - if (queue.length === 0) { - const result = iterator.next(); - left.push(result); - right.push(result); - } - return queue.shift(); - }, - }; - }; - return [ - new Stream(() => teeIterator(left), this.controller), - new Stream(() => teeIterator(right), this.controller), - ]; - } - /** - * Converts this stream to a newline-separated ReadableStream of - * JSON stringified values in the stream - * which can be turned back into a Stream with `Stream.fromReadableStream()`. - */ - toReadableStream() { - const self = this; - let iter; - const encoder = new TextEncoder(); - return new registry_ReadableStream({ - async start() { - iter = self[Symbol.asyncIterator](); - }, - async pull(ctrl) { - try { - const { value, done } = await iter.next(); - if (done) - return ctrl.close(); - const bytes = encoder.encode(JSON.stringify(value) + '\n'); - ctrl.enqueue(bytes); - } - catch (err) { - ctrl.error(err); - } - }, - async cancel() { - await iter.return?.(); - }, - }); - } -} -async function* _iterSSEMessages(response, controller) { - if (!response.body) { - controller.abort(); - throw new error_OpenAIError(`Attempted to iterate over a response with no body`); - } - const sseDecoder = new SSEDecoder(); - const lineDecoder = new LineDecoder(); - const iter = ReadableStreamToAsyncIterable(response.body); - for await (const sseChunk of iterSSEChunks(iter)) { - for (const line of lineDecoder.decode(sseChunk)) { - const sse = sseDecoder.decode(line); - if (sse) - yield sse; - } - } - for (const line of lineDecoder.flush()) { - const sse = sseDecoder.decode(line); - if (sse) - yield sse; - } -} -/** - * Given an async iterable iterator, iterates over it and yields full - * SSE chunks, i.e. yields when a double new-line is encountered. - */ -async function* iterSSEChunks(iterator) { - let data = new Uint8Array(); - for await (const chunk of iterator) { - if (chunk == null) { - continue; - } - const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) - : typeof chunk === 'string' ? 
new TextEncoder().encode(chunk) - : chunk; - let newData = new Uint8Array(data.length + binaryChunk.length); - newData.set(data); - newData.set(binaryChunk, data.length); - data = newData; - let patternIndex; - while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) { - yield data.slice(0, patternIndex); - data = data.slice(patternIndex); - } - } - if (data.length > 0) { - yield data; - } -} -class SSEDecoder { - constructor() { - this.event = null; - this.data = []; - this.chunks = []; - } - decode(line) { - if (line.endsWith('\r')) { - line = line.substring(0, line.length - 1); - } - if (!line) { - // empty line and we didn't previously encounter any messages - if (!this.event && !this.data.length) - return null; - const sse = { - event: this.event, - data: this.data.join('\n'), - raw: this.chunks, - }; - this.event = null; - this.data = []; - this.chunks = []; - return sse; - } - this.chunks.push(line); - if (line.startsWith(':')) { - return null; - } - let [fieldname, _, value] = partition(line, ':'); - if (value.startsWith(' ')) { - value = value.substring(1); - } - if (fieldname === 'event') { - this.event = value; - } - else if (fieldname === 'data') { - this.data.push(value); - } - return null; - } -} -function partition(str, delimiter) { - const index = str.indexOf(delimiter); - if (index !== -1) { - return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; - } - return [str, '', '']; -} -//# sourceMappingURL=streaming.mjs.map -;// ./node_modules/openai/uploads.mjs - - -const isResponseLike = (value) => value != null && - typeof value === 'object' && - typeof value.url === 'string' && - typeof value.blob === 'function'; -const uploads_isFileLike = (value) => value != null && - typeof value === 'object' && - typeof value.name === 'string' && - typeof value.lastModified === 'number' && - isBlobLike(value); -/** - * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check - * adds the arrayBuffer() method type because it is available and used at runtime - */ -const isBlobLike = (value) => value != null && - typeof value === 'object' && - typeof value.size === 'number' && - typeof value.type === 'string' && - typeof value.text === 'function' && - typeof value.slice === 'function' && - typeof value.arrayBuffer === 'function'; -const isUploadable = (value) => { - return uploads_isFileLike(value) || isResponseLike(value) || isFsReadStream(value); -}; -/** - * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats - * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s - * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible - * @param {Object=} options additional properties - * @param {string=} options.type the MIME type of the content - * @param {number=} options.lastModified the last modified timestamp - * @returns a {@link File} with the given properties - */ -async function toFile(value, name, options) { - // If it's a promise, resolve it. - value = await value; - // If we've been given a `File` we don't need to do anything - if (uploads_isFileLike(value)) { - return value; - } - if (isResponseLike(value)) { - const blob = await value.blob(); - name || (name = new URL(value.url).pathname.split(/[\\/]/).pop() ?? 
'unknown_file'); - // we need to convert the `Blob` into an array buffer because the `Blob` class - // that `node-fetch` defines is incompatible with the web standard which results - // in `new File` interpreting it as a string instead of binary data. - const data = isBlobLike(blob) ? [(await blob.arrayBuffer())] : [blob]; - return new File(data, name, options); - } - const bits = await getBytes(value); - name || (name = getName(value) ?? 'unknown_file'); - if (!options?.type) { - const type = bits[0]?.type; - if (typeof type === 'string') { - options = { ...options, type }; - } - } - return new File(bits, name, options); -} -async function getBytes(value) { - let parts = []; - if (typeof value === 'string' || - ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc. - value instanceof ArrayBuffer) { - parts.push(value); - } - else if (isBlobLike(value)) { - parts.push(await value.arrayBuffer()); - } - else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc. - ) { - for await (const chunk of value) { - parts.push(chunk); // TODO, consider validating? - } - } - else { - throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor - ?.name}; props: ${propsForError(value)}`); - } - return parts; -} -function propsForError(value) { - const props = Object.getOwnPropertyNames(value); - return `[${props.map((p) => `"${p}"`).join(', ')}]`; -} -function getName(value) { - return (getStringFromMaybeBuffer(value.name) || - getStringFromMaybeBuffer(value.filename) || - // For fs.ReadStream - getStringFromMaybeBuffer(value.path)?.split(/[\\/]/).pop()); -} -const getStringFromMaybeBuffer = (x) => { - if (typeof x === 'string') - return x; - if (typeof Buffer !== 'undefined' && x instanceof Buffer) - return String(x); - return undefined; -}; -const isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function'; -const isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody'; -/** - * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value. - * Otherwise returns the request as is. 
- */ -const maybeMultipartFormRequestOptions = async (opts) => { - if (!hasUploadableValue(opts.body)) - return opts; - const form = await createForm(opts.body); - return getMultipartRequestOptions(form, opts); -}; -const multipartFormRequestOptions = async (opts) => { - const form = await createForm(opts.body); - return registry_getMultipartRequestOptions(form, opts); -}; -const createForm = async (body) => { - const form = new registry_FormData(); - await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value))); - return form; -}; -const hasUploadableValue = (value) => { - if (isUploadable(value)) - return true; - if (Array.isArray(value)) - return value.some(hasUploadableValue); - if (value && typeof value === 'object') { - for (const k in value) { - if (hasUploadableValue(value[k])) - return true; - } - } - return false; -}; -const addFormValue = async (form, key, value) => { - if (value === undefined) - return; - if (value == null) { - throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`); - } - // TODO: make nested formats configurable - if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { - form.append(key, String(value)); - } - else if (isUploadable(value)) { - const file = await toFile(value); - form.append(key, file); - } - else if (Array.isArray(value)) { - await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry))); - } - else if (typeof value === 'object') { - await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop))); - } - else { - throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`); - } -}; -//# sourceMappingURL=uploads.mjs.map -;// ./node_modules/openai/core.mjs -var core_classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var core_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _AbstractPage_client; - - - - - - -async function defaultParseResponse(props) { - const { response } = props; - if (props.options.stream) { - debug('response', response.status, response.url, response.headers, response.body); - // Note: there is an invariant here that isn't represented in the type system - // that if you set `stream: true` the response type must also be `Stream` - if (props.options.__streamClass) { - return props.options.__streamClass.fromSSEResponse(response, props.controller); - } - return Stream.fromSSEResponse(response, props.controller); - } - // fetch refuses to read the body when the status code is 204. - if (response.status === 204) { - return null; - } - if (props.options.__binaryResponse) { - return response; - } - const contentType = response.headers.get('content-type'); - const mediaType = contentType?.split(';')[0]?.trim(); - const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json'); - if (isJSON) { - const json = await response.json(); - debug('response', response.status, response.url, response.headers, json); - return _addRequestID(json, response); - } - const text = await response.text(); - debug('response', response.status, response.url, response.headers, text); - // TODO handle blob, arraybuffer, other content types, etc. - return text; -} -function _addRequestID(value, response) { - if (!value || typeof value !== 'object' || Array.isArray(value)) { - return value; - } - return Object.defineProperty(value, '_request_id', { - value: response.headers.get('x-request-id'), - enumerable: false, - }); -} -/** - * A subclass of `Promise` providing additional helper methods - * for interacting with the SDK. - */ -class APIPromise extends Promise { - constructor(responsePromise, parseResponse = defaultParseResponse) { - super((resolve) => { - // this is maybe a bit weird but this has to be a no-op to not implicitly - // parse the response body; instead .then, .catch, .finally are overridden - // to parse the response - resolve(null); - }); - this.responsePromise = responsePromise; - this.parseResponse = parseResponse; - } - _thenUnwrap(transform) { - return new APIPromise(this.responsePromise, async (props) => _addRequestID(transform(await this.parseResponse(props), props), props.response)); - } - /** - * Gets the raw `Response` instance instead of parsing the response - * data. - * - * If you want to parse the response body but still get the `Response` - * instance, you can use {@link withResponse()}. - * - * 👋 Getting the wrong TypeScript type for `Response`? - * Try setting `"moduleResolution": "NodeNext"` if you can, - * or add one of these imports before your first `import … from 'openai'`: - * - `import 'openai/shims/node'` (if you're running on Node) - * - `import 'openai/shims/web'` (otherwise) - */ - asResponse() { - return this.responsePromise.then((p) => p.response); - } - /** - * Gets the parsed response data, the raw `Response` instance and the ID of the request, - * returned via the X-Request-ID header which is useful for debugging requests and reporting - * issues to OpenAI. - * - * If you just want to get the raw `Response` instance without parsing it, - * you can use {@link asResponse()}. - * - * - * 👋 Getting the wrong TypeScript type for `Response`? 
- * Try setting `"moduleResolution": "NodeNext"` if you can, - * or add one of these imports before your first `import … from 'openai'`: - * - `import 'openai/shims/node'` (if you're running on Node) - * - `import 'openai/shims/web'` (otherwise) - */ - async withResponse() { - const [data, response] = await Promise.all([this.parse(), this.asResponse()]); - return { data, response, request_id: response.headers.get('x-request-id') }; - } - parse() { - if (!this.parsedPromise) { - this.parsedPromise = this.responsePromise.then(this.parseResponse); - } - return this.parsedPromise; - } - then(onfulfilled, onrejected) { - return this.parse().then(onfulfilled, onrejected); - } - catch(onrejected) { - return this.parse().catch(onrejected); - } - finally(onfinally) { - return this.parse().finally(onfinally); - } -} -class APIClient { - constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes - httpAgent, fetch: overriddenFetch, }) { - this.baseURL = baseURL; - this.maxRetries = validatePositiveInteger('maxRetries', maxRetries); - this.timeout = validatePositiveInteger('timeout', timeout); - this.httpAgent = httpAgent; - this.fetch = overriddenFetch ?? registry_fetch; - } - authHeaders(opts) { - return {}; - } - /** - * Override this to add your own default headers, for example: - * - * { - * ...super.defaultHeaders(), - * Authorization: 'Bearer 123', - * } - */ - defaultHeaders(opts) { - return { - Accept: 'application/json', - 'Content-Type': 'application/json', - 'User-Agent': this.getUserAgent(), - ...getPlatformHeaders(), - ...this.authHeaders(opts), - }; - } - /** - * Override this to add your own headers validation: - */ - validateHeaders(headers, customHeaders) { } - defaultIdempotencyKey() { - return `stainless-node-retry-${uuid4()}`; - } - get(path, opts) { - return this.methodRequest('get', path, opts); - } - post(path, opts) { - return this.methodRequest('post', path, opts); - } - patch(path, opts) { - return this.methodRequest('patch', path, opts); - } - put(path, opts) { - return this.methodRequest('put', path, opts); - } - delete(path, opts) { - return this.methodRequest('delete', path, opts); - } - methodRequest(method, path, opts) { - return this.request(Promise.resolve(opts).then(async (opts) => { - const body = opts && isBlobLike(opts?.body) ? new DataView(await opts.body.arrayBuffer()) - : opts?.body instanceof DataView ? opts.body - : opts?.body instanceof ArrayBuffer ? new DataView(opts.body) - : opts && ArrayBuffer.isView(opts?.body) ? new DataView(opts.body.buffer) - : opts?.body; - return { method, path, ...opts, body }; - })); - } - getAPIList(path, Page, opts) { - return this.requestAPIList(Page, { method: 'get', path, ...opts }); - } - calculateContentLength(body) { - if (typeof body === 'string') { - if (typeof Buffer !== 'undefined') { - return Buffer.byteLength(body, 'utf8').toString(); - } - if (typeof TextEncoder !== 'undefined') { - const encoder = new TextEncoder(); - const encoded = encoder.encode(body); - return encoded.length.toString(); - } - } - else if (ArrayBuffer.isView(body)) { - return body.byteLength.toString(); - } - return null; - } - buildRequest(options, { retryCount = 0 } = {}) { - options = { ...options }; - const { method, path, query, headers: headers = {} } = options; - const body = ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? - options.body - : isMultipartBody(options.body) ? options.body.body - : options.body ? 
JSON.stringify(options.body, null, 2) - : null; - const contentLength = this.calculateContentLength(body); - const url = this.buildURL(path, query); - if ('timeout' in options) - validatePositiveInteger('timeout', options.timeout); - options.timeout = options.timeout ?? this.timeout; - const httpAgent = options.httpAgent ?? this.httpAgent ?? getDefaultAgent(url); - const minAgentTimeout = options.timeout + 1000; - if (typeof httpAgent?.options?.timeout === 'number' && - minAgentTimeout > (httpAgent.options.timeout ?? 0)) { - // Allow any given request to bump our agent active socket timeout. - // This may seem strange, but leaking active sockets should be rare and not particularly problematic, - // and without mutating agent we would need to create more of them. - // This tradeoff optimizes for performance. - httpAgent.options.timeout = minAgentTimeout; - } - if (this.idempotencyHeader && method !== 'get') { - if (!options.idempotencyKey) - options.idempotencyKey = this.defaultIdempotencyKey(); - headers[this.idempotencyHeader] = options.idempotencyKey; - } - const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); - const req = { - method, - ...(body && { body: body }), - headers: reqHeaders, - ...(httpAgent && { agent: httpAgent }), - // @ts-ignore node-fetch uses a custom AbortSignal type that is - // not compatible with standard web types - signal: options.signal ?? null, - }; - return { req, url, timeout: options.timeout }; - } - buildHeaders({ options, headers, contentLength, retryCount, }) { - const reqHeaders = {}; - if (contentLength) { - reqHeaders['content-length'] = contentLength; - } - const defaultHeaders = this.defaultHeaders(options); - applyHeadersMut(reqHeaders, defaultHeaders); - applyHeadersMut(reqHeaders, headers); - // let builtin fetch set the Content-Type for multipart bodies - if (isMultipartBody(options.body) && kind !== 'node') { - delete reqHeaders['content-type']; - } - // Don't set theses headers if they were already set or removed through default headers or by the caller. - // We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to account - // for the removal case. - if (getHeader(defaultHeaders, 'x-stainless-retry-count') === undefined && - getHeader(headers, 'x-stainless-retry-count') === undefined) { - reqHeaders['x-stainless-retry-count'] = String(retryCount); - } - if (getHeader(defaultHeaders, 'x-stainless-timeout') === undefined && - getHeader(headers, 'x-stainless-timeout') === undefined && - options.timeout) { - reqHeaders['x-stainless-timeout'] = String(options.timeout); - } - this.validateHeaders(reqHeaders, headers); - return reqHeaders; - } - /** - * Used as a callback for mutating the given `FinalRequestOptions` object. - */ - async prepareOptions(options) { } - /** - * Used as a callback for mutating the given `RequestInit` object. - * - * This is useful for cases where you want to add certain headers based off of - * the request properties, e.g. `method` or `url`. - */ - async prepareRequest(request, { url, options }) { } - parseHeaders(headers) { - return (!headers ? {} - : Symbol.iterator in headers ? 
- Object.fromEntries(Array.from(headers).map((header) => [...header])) - : { ...headers }); - } - makeStatusError(status, error, message, headers) { - return APIError.generate(status, error, message, headers); - } - request(options, remainingRetries = null) { - return new APIPromise(this.makeRequest(options, remainingRetries)); - } - async makeRequest(optionsInput, retriesRemaining) { - const options = await optionsInput; - const maxRetries = options.maxRetries ?? this.maxRetries; - if (retriesRemaining == null) { - retriesRemaining = maxRetries; - } - await this.prepareOptions(options); - const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); - await this.prepareRequest(req, { url, options }); - debug('request', url, options, req.headers); - if (options.signal?.aborted) { - throw new APIUserAbortError(); - } - const controller = new AbortController(); - const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError); - if (response instanceof Error) { - if (options.signal?.aborted) { - throw new APIUserAbortError(); - } - if (retriesRemaining) { - return this.retryRequest(options, retriesRemaining); - } - if (response.name === 'AbortError') { - throw new APIConnectionTimeoutError(); - } - throw new APIConnectionError({ cause: response }); - } - const responseHeaders = createResponseHeaders(response.headers); - if (!response.ok) { - if (retriesRemaining && this.shouldRetry(response)) { - const retryMessage = `retrying, ${retriesRemaining} attempts remaining`; - debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders); - return this.retryRequest(options, retriesRemaining, responseHeaders); - } - const errText = await response.text().catch((e) => castToError(e).message); - const errJSON = safeJSON(errText); - const errMessage = errJSON ? undefined : errText; - const retryMessage = retriesRemaining ? `(error; no more retries left)` : `(error; not retryable)`; - debug(`response (error; ${retryMessage})`, response.status, url, responseHeaders, errMessage); - const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders); - throw err; - } - return { response, options, controller }; - } - requestAPIList(Page, options) { - const request = this.makeRequest(options, null); - return new PagePromise(this, request, Page); - } - buildURL(path, query) { - const url = isAbsoluteURL(path) ? - new URL(path) - : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path)); - const defaultQuery = this.defaultQuery(); - if (!isEmptyObj(defaultQuery)) { - query = { ...defaultQuery, ...query }; - } - if (typeof query === 'object' && query && !Array.isArray(query)) { - url.search = this.stringifyQuery(query); - } - return url.toString(); - } - stringifyQuery(query) { - return Object.entries(query) - .filter(([_, value]) => typeof value !== 'undefined') - .map(([key, value]) => { - if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { - return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`; - } - if (value === null) { - return `${encodeURIComponent(key)}=`; - } - throw new error_OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`); - }) - .join('&'); - } - async fetchWithTimeout(url, init, ms, controller) { - const { signal, ...options } = init || {}; - if (signal) - signal.addEventListener('abort', () => controller.abort()); - const timeout = setTimeout(() => controller.abort(), ms); - const fetchOptions = { - signal: controller.signal, - ...options, - }; - if (fetchOptions.method) { - // Custom methods like 'patch' need to be uppercased - // See https://github.com/nodejs/undici/issues/2294 - fetchOptions.method = fetchOptions.method.toUpperCase(); - } - return ( - // use undefined this binding; fetch errors if bound to something else in browser/cloudflare - this.fetch.call(undefined, url, fetchOptions).finally(() => { - clearTimeout(timeout); - })); - } - shouldRetry(response) { - // Note this is not a standard header. - const shouldRetryHeader = response.headers.get('x-should-retry'); - // If the server explicitly says whether or not to retry, obey. - if (shouldRetryHeader === 'true') - return true; - if (shouldRetryHeader === 'false') - return false; - // Retry on request timeouts. - if (response.status === 408) - return true; - // Retry on lock timeouts. - if (response.status === 409) - return true; - // Retry on rate limits. - if (response.status === 429) - return true; - // Retry internal errors. - if (response.status >= 500) - return true; - return false; - } - async retryRequest(options, retriesRemaining, responseHeaders) { - let timeoutMillis; - // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it. - const retryAfterMillisHeader = responseHeaders?.['retry-after-ms']; - if (retryAfterMillisHeader) { - const timeoutMs = parseFloat(retryAfterMillisHeader); - if (!Number.isNaN(timeoutMs)) { - timeoutMillis = timeoutMs; - } - } - // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After - const retryAfterHeader = responseHeaders?.['retry-after']; - if (retryAfterHeader && !timeoutMillis) { - const timeoutSeconds = parseFloat(retryAfterHeader); - if (!Number.isNaN(timeoutSeconds)) { - timeoutMillis = timeoutSeconds * 1000; - } - else { - timeoutMillis = Date.parse(retryAfterHeader) - Date.now(); - } - } - // If the API asks us to wait a certain amount of time (and it's a reasonable amount), - // just do what it says, but otherwise calculate a default - if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) { - const maxRetries = options.maxRetries ?? this.maxRetries; - timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries); - } - await sleep(timeoutMillis); - return this.makeRequest(options, retriesRemaining - 1); - } - calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) { - const initialRetryDelay = 0.5; - const maxRetryDelay = 8.0; - const numRetries = maxRetries - retriesRemaining; - // Apply exponential backoff, but not more than the max. - const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay); - // Apply some jitter, take up to at most 25 percent of the retry time. 
- const jitter = 1 - Math.random() * 0.25; - return sleepSeconds * jitter * 1000; - } - getUserAgent() { - return `${this.constructor.name}/JS ${VERSION}`; - } -} -class AbstractPage { - constructor(client, response, body, options) { - _AbstractPage_client.set(this, void 0); - core_classPrivateFieldSet(this, _AbstractPage_client, client, "f"); - this.options = options; - this.response = response; - this.body = body; - } - hasNextPage() { - const items = this.getPaginatedItems(); - if (!items.length) - return false; - return this.nextPageInfo() != null; - } - async getNextPage() { - const nextInfo = this.nextPageInfo(); - if (!nextInfo) { - throw new error_OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.'); - } - const nextOptions = { ...this.options }; - if ('params' in nextInfo && typeof nextOptions.query === 'object') { - nextOptions.query = { ...nextOptions.query, ...nextInfo.params }; - } - else if ('url' in nextInfo) { - const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()]; - for (const [key, value] of params) { - nextInfo.url.searchParams.set(key, value); - } - nextOptions.query = undefined; - nextOptions.path = nextInfo.url.toString(); - } - return await core_classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions); - } - async *iterPages() { - // eslint-disable-next-line @typescript-eslint/no-this-alias - let page = this; - yield page; - while (page.hasNextPage()) { - page = await page.getNextPage(); - yield page; - } - } - async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() { - for await (const page of this.iterPages()) { - for (const item of page.getPaginatedItems()) { - yield item; - } - } - } -} -/** - * This subclass of Promise will resolve to an instantiated Page once the request completes. - * - * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg: - * - * for await (const item of client.items.list()) { - * console.log(item) - * } - */ -class PagePromise extends APIPromise { - constructor(client, request, Page) { - super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options)); - } - /** - * Allow auto-paginating iteration on an unawaited list call, eg: - * - * for await (const item of client.items.list()) { - * console.log(item) - * } - */ - async *[Symbol.asyncIterator]() { - const page = await this; - for await (const item of page) { - yield item; - } - } -} -const createResponseHeaders = (headers) => { - return new Proxy(Object.fromEntries( - // @ts-ignore - headers.entries()), { - get(target, name) { - const key = name.toString(); - return target[key.toLowerCase()] || target[key]; - }, - }); -}; -// This is required so that we can determine if a given object matches the RequestOptions -// type at runtime. While this requires duplication, it is enforced by the TypeScript -// compiler such that any missing / extraneous keys will cause an error. 
-const requestOptionsKeys = { - method: true, - path: true, - query: true, - body: true, - headers: true, - maxRetries: true, - stream: true, - timeout: true, - httpAgent: true, - signal: true, - idempotencyKey: true, - __metadata: true, - __binaryRequest: true, - __binaryResponse: true, - __streamClass: true, -}; -const isRequestOptions = (obj) => { - return (typeof obj === 'object' && - obj !== null && - !isEmptyObj(obj) && - Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k))); -}; -const getPlatformProperties = () => { - if (typeof Deno !== 'undefined' && Deno.build != null) { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': normalizePlatform(Deno.build.os), - 'X-Stainless-Arch': normalizeArch(Deno.build.arch), - 'X-Stainless-Runtime': 'deno', - 'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown', - }; - } - if (typeof EdgeRuntime !== 'undefined') { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': `other:${EdgeRuntime}`, - 'X-Stainless-Runtime': 'edge', - 'X-Stainless-Runtime-Version': process.version, - }; - } - // Check if Node.js - if (Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]') { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': normalizePlatform(process.platform), - 'X-Stainless-Arch': normalizeArch(process.arch), - 'X-Stainless-Runtime': 'node', - 'X-Stainless-Runtime-Version': process.version, - }; - } - const browserInfo = getBrowserInfo(); - if (browserInfo) { - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': 'unknown', - 'X-Stainless-Runtime': `browser:${browserInfo.browser}`, - 'X-Stainless-Runtime-Version': browserInfo.version, - }; - } - // TODO add support for Cloudflare workers, etc. - return { - 'X-Stainless-Lang': 'js', - 'X-Stainless-Package-Version': VERSION, - 'X-Stainless-OS': 'Unknown', - 'X-Stainless-Arch': 'unknown', - 'X-Stainless-Runtime': 'unknown', - 'X-Stainless-Runtime-Version': 'unknown', - }; -}; -// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts -function getBrowserInfo() { - if (typeof navigator === 'undefined' || !navigator) { - return null; - } - // NOTE: The order matters here! 
- const browserPatterns = [ - { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ }, - { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }, - ]; - // Find the FIRST matching browser - for (const { key, pattern } of browserPatterns) { - const match = pattern.exec(navigator.userAgent); - if (match) { - const major = match[1] || 0; - const minor = match[2] || 0; - const patch = match[3] || 0; - return { browser: key, version: `${major}.${minor}.${patch}` }; - } - } - return null; -} -const normalizeArch = (arch) => { - // Node docs: - // - https://nodejs.org/api/process.html#processarch - // Deno docs: - // - https://doc.deno.land/deno/stable/~/Deno.build - if (arch === 'x32') - return 'x32'; - if (arch === 'x86_64' || arch === 'x64') - return 'x64'; - if (arch === 'arm') - return 'arm'; - if (arch === 'aarch64' || arch === 'arm64') - return 'arm64'; - if (arch) - return `other:${arch}`; - return 'unknown'; -}; -const normalizePlatform = (platform) => { - // Node platforms: - // - https://nodejs.org/api/process.html#processplatform - // Deno platforms: - // - https://doc.deno.land/deno/stable/~/Deno.build - // - https://github.com/denoland/deno/issues/14799 - platform = platform.toLowerCase(); - // NOTE: this iOS check is untested and may not work - // Node does not work natively on IOS, there is a fork at - // https://github.com/nodejs-mobile/nodejs-mobile - // however it is unknown at the time of writing how to detect if it is running - if (platform.includes('ios')) - return 'iOS'; - if (platform === 'android') - return 'Android'; - if (platform === 'darwin') - return 'MacOS'; - if (platform === 'win32') - return 'Windows'; - if (platform === 'freebsd') - return 'FreeBSD'; - if (platform === 'openbsd') - return 'OpenBSD'; - if (platform === 'linux') - return 'Linux'; - if (platform) - return `Other:${platform}`; - return 'Unknown'; -}; -let _platformHeaders; -const getPlatformHeaders = () => { - return (_platformHeaders ?? (_platformHeaders = getPlatformProperties())); -}; -const safeJSON = (text) => { - try { - return JSON.parse(text); - } - catch (err) { - return undefined; - } -}; -// https://url.spec.whatwg.org/#url-scheme-string -const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i; -const isAbsoluteURL = (url) => { - return startsWithSchemeRegexp.test(url); -}; -const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); -const validatePositiveInteger = (name, n) => { - if (typeof n !== 'number' || !Number.isInteger(n)) { - throw new error_OpenAIError(`${name} must be an integer`); - } - if (n < 0) { - throw new error_OpenAIError(`${name} must be a positive integer`); - } - return n; -}; -const castToError = (err) => { - if (err instanceof Error) - return err; - if (typeof err === 'object' && err !== null) { - try { - return new Error(JSON.stringify(err)); - } - catch { } - } - return new Error(err); -}; -const ensurePresent = (value) => { - if (value == null) - throw new OpenAIError(`Expected a value to be given but received ${value} instead.`); - return value; -}; -/** - * Read an environment variable. - * - * Trims beginning and trailing whitespace. 
- * - * Will return undefined if the environment variable doesn't exist or cannot be accessed. - */ -const readEnv = (env) => { - if (typeof process !== 'undefined') { - return process.env?.[env]?.trim() ?? undefined; - } - if (typeof Deno !== 'undefined') { - return Deno.env?.get?.(env)?.trim(); - } - return undefined; -}; -const coerceInteger = (value) => { - if (typeof value === 'number') - return Math.round(value); - if (typeof value === 'string') - return parseInt(value, 10); - throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); -}; -const coerceFloat = (value) => { - if (typeof value === 'number') - return value; - if (typeof value === 'string') - return parseFloat(value); - throw new OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`); -}; -const coerceBoolean = (value) => { - if (typeof value === 'boolean') - return value; - if (typeof value === 'string') - return value === 'true'; - return Boolean(value); -}; -const maybeCoerceInteger = (value) => { - if (value === undefined) { - return undefined; - } - return coerceInteger(value); -}; -const maybeCoerceFloat = (value) => { - if (value === undefined) { - return undefined; - } - return coerceFloat(value); -}; -const maybeCoerceBoolean = (value) => { - if (value === undefined) { - return undefined; - } - return coerceBoolean(value); -}; -// https://stackoverflow.com/a/34491287 -function isEmptyObj(obj) { - if (!obj) - return true; - for (const _k in obj) - return false; - return true; -} -// https://eslint.org/docs/latest/rules/no-prototype-builtins -function hasOwn(obj, key) { - return Object.prototype.hasOwnProperty.call(obj, key); -} -/** - * Copies headers from "newHeaders" onto "targetHeaders", - * using lower-case for all properties, - * ignoring any keys with undefined values, - * and deleting any keys with null values. - */ -function applyHeadersMut(targetHeaders, newHeaders) { - for (const k in newHeaders) { - if (!hasOwn(newHeaders, k)) - continue; - const lowerKey = k.toLowerCase(); - if (!lowerKey) - continue; - const val = newHeaders[k]; - if (val === null) { - delete targetHeaders[lowerKey]; - } - else if (val !== undefined) { - targetHeaders[lowerKey] = val; - } - } -} -const SENSITIVE_HEADERS = new Set(['authorization', 'api-key']); -function debug(action, ...args) { - if (typeof process !== 'undefined' && process?.env?.['DEBUG'] === 'true') { - const modifiedArgs = args.map((arg) => { - if (!arg) { - return arg; - } - // Check for sensitive headers in request body 'headers' object - if (arg['headers']) { - // clone so we don't mutate - const modifiedArg = { ...arg, headers: { ...arg['headers'] } }; - for (const header in arg['headers']) { - if (SENSITIVE_HEADERS.has(header.toLowerCase())) { - modifiedArg['headers'][header] = 'REDACTED'; - } - } - return modifiedArg; - } - let modifiedArg = null; - // Check for sensitive headers in headers object - for (const header in arg) { - if (SENSITIVE_HEADERS.has(header.toLowerCase())) { - // avoid making a copy until we need to - modifiedArg ?? (modifiedArg = { ...arg }); - modifiedArg[header] = 'REDACTED'; - } - } - return modifiedArg ?? arg; - }); - console.log(`OpenAI:DEBUG:${action}`, ...modifiedArgs); - } -} -/** - * https://stackoverflow.com/a/2117523 - */ -const uuid4 = () => { - return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { - const r = (Math.random() * 16) | 0; - const v = c === 'x' ? 
r : (r & 0x3) | 0x8; - return v.toString(16); - }); -}; -const isRunningInBrowser = () => { - return ( - // @ts-ignore - typeof window !== 'undefined' && - // @ts-ignore - typeof window.document !== 'undefined' && - // @ts-ignore - typeof navigator !== 'undefined'); -}; -const isHeadersProtocol = (headers) => { - return typeof headers?.get === 'function'; -}; -const getRequiredHeader = (headers, header) => { - const foundHeader = getHeader(headers, header); - if (foundHeader === undefined) { - throw new Error(`Could not find ${header} header`); - } - return foundHeader; -}; -const getHeader = (headers, header) => { - const lowerCasedHeader = header.toLowerCase(); - if (isHeadersProtocol(headers)) { - // to deal with the case where the header looks like Stainless-Event-Id - const intercapsHeader = header[0]?.toUpperCase() + - header.substring(1).replace(/([^\w])(\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase()); - for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) { - const value = headers.get(key); - if (value) { - return value; - } - } - } - for (const [key, value] of Object.entries(headers)) { - if (key.toLowerCase() === lowerCasedHeader) { - if (Array.isArray(value)) { - if (value.length <= 1) - return value[0]; - console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`); - return value[0]; - } - return value; - } - } - return undefined; -}; -/** - * Encodes a string to Base64 format. - */ -const toBase64 = (str) => { - if (!str) - return ''; - if (typeof Buffer !== 'undefined') { - return Buffer.from(str).toString('base64'); - } - if (typeof btoa !== 'undefined') { - return btoa(str); - } - throw new OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined'); -}; -function isObj(obj) { - return obj != null && typeof obj === 'object' && !Array.isArray(obj); -} -//# sourceMappingURL=core.mjs.map ;// ./node_modules/openai/resource.mjs // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. class APIResource { @@ -172420,12 +174060,44 @@ Chat.ChatCompletionsPage = ChatCompletionsPage; ;// ./node_modules/openai/resources/embeddings.mjs // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + class Embeddings extends APIResource { /** * Creates an embedding vector representing the input text. */ create(body, options) { - return this._client.post('/embeddings', { body, ...options }); + const hasUserProvidedEncodingFormat = !!body.encoding_format; + // No encoding_format specified, defaulting to base64 for performance reasons + // See https://github.com/openai/openai-node/pull/1312 + let encoding_format = hasUserProvidedEncodingFormat ? 
body.encoding_format : 'base64'; + if (hasUserProvidedEncodingFormat) { + debug('Request', 'User defined encoding_format:', body.encoding_format); + } + const response = this._client.post('/embeddings', { + body: { + ...body, + encoding_format: encoding_format, + }, + ...options, + }); + // if the user specified an encoding_format, return the response as-is + if (hasUserProvidedEncodingFormat) { + return response; + } + // in this stage, we are sure the user did not specify an encoding_format + // and we defaulted to base64 for performance reasons + // we are sure then that the response is base64 encoded, let's decode it + // the returned result will be a float32 array since this is OpenAI API's default encoding + debug('response', 'Decoding base64 embeddings to float32 array'); + return response._thenUnwrap((response) => { + if (response && response.data) { + response.data.forEach((embeddingBase64Obj) => { + const embeddingBase64Str = embeddingBase64Obj.embedding; + embeddingBase64Obj.embedding = toFloat32Array(embeddingBase64Str); + }); + } + return response; + }); } } //# sourceMappingURL=embeddings.mjs.map @@ -172571,7 +174243,12 @@ class Speech extends APIResource { class Transcriptions extends APIResource { create(body, options) { - return this._client.post('/audio/transcriptions', multipartFormRequestOptions({ body, ...options, __metadata: { model: body.model } })); + return this._client.post('/audio/transcriptions', multipartFormRequestOptions({ + body, + ...options, + stream: body.stream ?? false, + __metadata: { model: body.model }, + })); } } //# sourceMappingURL=transcriptions.mjs.map @@ -173425,154 +175102,6 @@ _EventStream_connectedPromise = new WeakMap(), _EventStream_resolveConnectedProm return this._emit('error', new error_OpenAIError(String(error))); }; //# sourceMappingURL=EventStream.mjs.map -;// ./node_modules/openai/lib/parser.mjs - -function makeParseableResponseFormat(response_format, parser) { - const obj = { ...response_format }; - Object.defineProperties(obj, { - $brand: { - value: 'auto-parseable-response-format', - enumerable: false, - }, - $parseRaw: { - value: parser, - enumerable: false, - }, - }); - return obj; -} -function makeParseableTextFormat(response_format, parser) { - const obj = { ...response_format }; - Object.defineProperties(obj, { - $brand: { - value: 'auto-parseable-response-format', - enumerable: false, - }, - $parseRaw: { - value: parser, - enumerable: false, - }, - }); - return obj; -} -function isAutoParsableResponseFormat(response_format) { - return response_format?.['$brand'] === 'auto-parseable-response-format'; -} -function makeParseableTool(tool, { parser, callback, }) { - const obj = { ...tool }; - Object.defineProperties(obj, { - $brand: { - value: 'auto-parseable-tool', - enumerable: false, - }, - $parseRaw: { - value: parser, - enumerable: false, - }, - $callback: { - value: callback, - enumerable: false, - }, - }); - return obj; -} -function isAutoParsableTool(tool) { - return tool?.['$brand'] === 'auto-parseable-tool'; -} -function maybeParseChatCompletion(completion, params) { - if (!params || !hasAutoParseableInput(params)) { - return { - ...completion, - choices: completion.choices.map((choice) => ({ - ...choice, - message: { - ...choice.message, - parsed: null, - ...(choice.message.tool_calls ? 
- { - tool_calls: choice.message.tool_calls, - } - : undefined), - }, - })), - }; - } - return parseChatCompletion(completion, params); -} -function parseChatCompletion(completion, params) { - const choices = completion.choices.map((choice) => { - if (choice.finish_reason === 'length') { - throw new LengthFinishReasonError(); - } - if (choice.finish_reason === 'content_filter') { - throw new ContentFilterFinishReasonError(); - } - return { - ...choice, - message: { - ...choice.message, - ...(choice.message.tool_calls ? - { - tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? undefined, - } - : undefined), - parsed: choice.message.content && !choice.message.refusal ? - parseResponseFormat(params, choice.message.content) - : null, - }, - }; - }); - return { ...completion, choices }; -} -function parseResponseFormat(params, content) { - if (params.response_format?.type !== 'json_schema') { - return null; - } - if (params.response_format?.type === 'json_schema') { - if ('$parseRaw' in params.response_format) { - const response_format = params.response_format; - return response_format.$parseRaw(content); - } - return JSON.parse(content); - } - return null; -} -function parseToolCall(params, toolCall) { - const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); - return { - ...toolCall, - function: { - ...toolCall.function, - parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments) - : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments) - : null, - }, - }; -} -function shouldParseToolCall(params, toolCall) { - if (!params) { - return false; - } - const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name); - return isAutoParsableTool(inputTool) || inputTool?.function.strict || false; -} -function hasAutoParseableInput(params) { - if (isAutoParsableResponseFormat(params.response_format)) { - return true; - } - return (params.tools?.some((t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true)) ?? false); -} -function validateInputTools(tools) { - for (const tool of tools ?? []) { - if (tool.type !== 'function') { - throw new error_OpenAIError(`Currently only \`function\` tool types support auto-parsing; Received \`${tool.type}\``); - } - if (tool.function.strict !== true) { - throw new error_OpenAIError(`The \`${tool.function.name}\` tool is not marked with \`strict: true\`. Only strict function tools can be auto-parsed`); - } - } -} -//# sourceMappingURL=parser.mjs.map ;// ./node_modules/openai/lib/AbstractChatCompletionRunner.mjs var AbstractChatCompletionRunner_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); @@ -174826,18 +176355,44 @@ class Sessions extends APIResource { } } //# sourceMappingURL=sessions.mjs.map +;// ./node_modules/openai/resources/beta/realtime/transcription-sessions.mjs +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +class TranscriptionSessions extends APIResource { + /** + * Create an ephemeral API token for use in client-side applications with the + * Realtime API specifically for realtime transcriptions. Can be configured with + * the same session parameters as the `transcription_session.update` client event. 
+ * + * It responds with a session object, plus a `client_secret` key which contains a + * usable ephemeral API token that can be used to authenticate browser clients for + * the Realtime API. + */ + create(body, options) { + return this._client.post('/realtime/transcription_sessions', { + body, + ...options, + headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers }, + }); + } +} +//# sourceMappingURL=transcription-sessions.mjs.map ;// ./node_modules/openai/resources/beta/realtime/realtime.mjs // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + + class Realtime extends APIResource { constructor() { super(...arguments); this.sessions = new Sessions(this._client); + this.transcriptionSessions = new TranscriptionSessions(this._client); } } Realtime.Sessions = Sessions; +Realtime.TranscriptionSessions = TranscriptionSessions; //# sourceMappingURL=realtime.mjs.map ;// ./node_modules/openai/lib/AssistantStream.mjs var AssistantStream_classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { @@ -175974,7 +177529,7 @@ function ResponsesParser_hasAutoParseableInput(params) { } return false; } -function makeParseableResponseTool(tool, { parser, callback, }) { +function ResponsesParser_makeParseableResponseTool(tool, { parser, callback, }) { const obj = { ...tool }; Object.defineProperties(obj, { $brand: { @@ -176646,9 +178201,6 @@ const loadConfig = (options) => { -/** - * Default base URLs for different routers - */ const ROUTER_BASE_URLS = { openrouter: 'https://openrouter.ai/api/v1', openai: '', // OpenAI uses default URL @@ -176659,9 +178211,6 @@ const ROUTER_BASE_URLS = { gemini: 'https://generativelanguage.googleapis.com/v1beta', // Gemini API base URL xai: 'https://api.x.ai/v1', // XAI (Grok) API base URL }; -/** - * Default models for different routers - */ const DEFAULT_MODELS = { openrouter: 'anthropic/claude-3.5-sonnet', openai: 'gpt-4o', @@ -176678,18 +178227,14 @@ const DEFAULT_MODELS = { * @returns OpenAI client instance or undefined if configuration is invalid */ const createClient = (options) => { - // Load configuration from file const config = loadConfig(options); if (!config) { dist_in_logger.error("Config not found in $HOME/.osr/config.json. " + "Optionally, export OSR_CONFIG with the path to the configuration file."); return undefined; } - // Determine router to use (defaults to 'openrouter') const router = (options.router ?? 'openrouter'); - // Initialize API key let apiKey = options.api_key; - // Set API key based on router if not provided in options if (!apiKey) { switch (router) { case 'openrouter': @@ -176719,25 +178264,20 @@ const createClient = (options) => { break; } } - // Validate API key if (!apiKey) { dist_in_logger.error(`No ${router} key found. Please provide an "api_key", set it in the config, or pass it via JSON config.`); return undefined; } - // Set default baseURL if not provided const baseURL = options.baseURL ?? 
ROUTER_BASE_URLS[router]; - // Set default model if not provided if (!options.model) { options.model = DEFAULT_MODELS[router]; } - dist_in_logger.info(`Creating client with ${router} router, model ${options.model}, and API key ${apiKey} at ${baseURL}`); - // Create and return the OpenAI client instance return new node_modules_openai({ apiKey, baseURL, }); }; -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2xpZW50LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2NsaWVudC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFRLE1BQU0sTUFBTyxRQUFRLENBQUE7QUFDN0IsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLFlBQVksQ0FBQTtBQUNuQyxPQUFPLEVBQUUsVUFBVSxFQUFFLE1BQU0sYUFBYSxDQUFBO0FBT3hDOztHQUVHO0FBQ0gsTUFBTSxnQkFBZ0IsR0FBK0I7SUFDakQsVUFBVSxFQUFFLDhCQUE4QjtJQUMxQyxNQUFNLEVBQUUsRUFBRSxFQUFFLDBCQUEwQjtJQUN0QyxRQUFRLEVBQUUsMEJBQTBCO0lBQ3BDLFdBQVcsRUFBRSxzQ0FBc0M7SUFDbkQsTUFBTSxFQUFFLHdCQUF3QixFQUFFLHNDQUFzQztJQUN4RSxTQUFTLEVBQUUsNkJBQTZCLEVBQUUseUJBQXlCO0lBQ25FLE1BQU0sRUFBRSxrREFBa0QsRUFBRSxzQkFBc0I7SUFDbEYsR0FBRyxFQUFFLHFCQUFxQixFQUFFLDBCQUEwQjtDQUN6RCxDQUFBO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLGNBQWMsR0FBK0I7SUFDL0MsVUFBVSxFQUFFLDZCQUE2QjtJQUN6QyxNQUFNLEVBQUUsUUFBUTtJQUNoQixRQUFRLEVBQUUsZUFBZTtJQUN6QixXQUFXLEVBQUUsY0FBYztJQUMzQixNQUFNLEVBQUUsU0FBUyxFQUFFLHVCQUF1QjtJQUMxQyxTQUFTLEVBQUUsbUJBQW1CLEVBQUUsMEJBQTBCO0lBQzFELE1BQU0sRUFBRSxnQkFBZ0IsRUFBRSx1QkFBdUI7SUFDakQsR0FBRyxFQUFFLFFBQVEsQ0FBQywyQkFBMkI7Q0FDNUMsQ0FBQTtBQUNEOzs7O0dBSUc7QUFDSCxNQUFNLENBQUMsTUFBTSxZQUFZLEdBQUcsQ0FBQyxPQUFxQixFQUFFLEVBQUU7SUFDbEQsK0JBQStCO0lBQy9CLE1BQU0sTUFBTSxHQUFHLFVBQVUsQ0FBQyxPQUFPLENBQUMsQ0FBQztJQUNuQyxJQUFJLENBQUMsTUFBTSxFQUFFLENBQUM7UUFDVixNQUFNLENBQUMsS0FBSyxDQUNSLDhDQUE4QztZQUM5Qyx3RUFBd0UsQ0FDM0UsQ0FBQztRQUNGLE9BQU8sU0FBUyxDQUFDO0lBQ3JCLENBQUM7SUFDRCxxREFBcUQ7SUFDckQsTUFBTSxNQUFNLEdBQWUsQ0FBQyxPQUFPLENBQUMsTUFBTSxJQUFJLFlBQVksQ0FBZSxDQUFDO0lBRTFFLHFCQUFxQjtJQUNyQixJQUFJLE1BQU0sR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDO0lBRTdCLHlEQUF5RDtJQUN6RCxJQUFJLENBQUMsTUFBTSxFQUFFLENBQUM7UUFDVixRQUFRLE1BQU0sRUFBRSxDQUFDO1lBQ2IsS0FBSyxZQUFZO2dCQUNiLE1BQU0sR0FBRyxNQUFNLEVBQUUsVUFBVSxFQUFFLEdBQUcsQ0FBQztnQkFDakMsTUFBTTtZQUNWLEtBQUssUUFBUTtnQkFDVCxNQUFNLEdBQUcsTUFBTSxFQUFFLE1BQU0sRUFBRSxHQUFHLENBQUM7Z0JBQzdCLE1BQU07WUFDVixLQUFLLFVBQVU7Z0JBQ1gsTUFBTSxHQUFHLE1BQU0sRUFBRSxRQUFRLEVBQUUsR0FBRyxDQUFDO2dCQUMvQixNQUFNO1lBQ1YsS0FBSyxhQUFhO2dCQUNkLE1BQU0sR0FBRyxNQUFNLEVBQUUsV0FBVyxFQUFFLEdBQUcsQ0FBQztnQkFDbEMsTUFBTTtZQUNWLEtBQUssUUFBUTtnQkFDVCx5REFBeUQ7Z0JBQ3pELE1BQU0sR0FBRyxRQUFRLENBQUMsQ0FBQyx1QkFBdUI7Z0JBQzFDLE1BQU07WUFDVixLQUFLLFdBQVc7Z0JBQ1osTUFBTSxHQUFHLE1BQU0sRUFBRSxTQUFTLEVBQUUsR0FBRyxDQUFDO2dCQUNoQyxNQUFNO1lBQ1YsS0FBSyxRQUFRO2dCQUNULE1BQU0sR0FBRyxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsQ0FBQztnQkFDN0IsTUFBTTtZQUNWLEtBQUssS0FBSztnQkFDTixNQUFNLEdBQUcsTUFBTSxFQUFFLEdBQUcsRUFBRSxHQUFHLENBQUM7Z0JBQzFCLE1BQU07UUFDZCxDQUFDO0lBQ0wsQ0FBQztJQUVELG1CQUFtQjtJQUNuQixJQUFJLENBQUMsTUFBTSxFQUFHLENBQUM7UUFDWCxNQUFNLENBQUMsS0FBSyxDQUFDLE1BQU0sTUFBTSw0RkFBNEYsQ0FBQyxDQUFDO1FBQ3ZILE9BQU8sU0FBUyxDQUFDO0lBQ3JCLENBQUM7SUFFRCxzQ0FBc0M7SUFDdEMsTUFBTSxPQUFPLEdBQUcsT0FBTyxDQUFDLE9BQU8sSUFBSSxnQkFBZ0IsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUUzRCxvQ0FBb0M7SUFDcEMsSUFBSSxDQUFDLE9BQU8sQ0FBQyxLQUFLLEVBQUUsQ0FBQztRQUNqQixPQUFPLENBQUMsS0FBSyxHQUFHLGNBQWMsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMxQyxDQUFDO0lBRUQsTUFBTSxDQUFDLElBQUksQ0FBQyx3QkFBd0IsTUFBTSxrQkFBa0IsT0FBTyxDQUFDLEtBQUssaUJBQWlCLE1BQU0sT0FBTyxPQUFPLEVBQUUsQ0FBQyxDQUFBO0lBRWpILCtDQUErQztJQUMvQyxPQUFPLElBQUksTUFBTSxDQUFDO1FBQ2QsTUFBTTtRQUNOLE9BQU87S0FDVixDQUFDLENBQUE7QUFDTixDQUFDLENBQUEifQ== +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2xpZW50LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2NsaWVudC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFRLE1BQU0sTUFBTyxRQUFRLENBQUE7QUFDN0IsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLFlBQVksQ0FBQTtBQUNuQyxPQUFPLEVBQUUsVUFBVSxFQUFFLE1BQU0sYUFBYSxDQUFBO0FBS3hDLE1BQU0sZ0JBQWdCLEdBQStCO0lBQ2pELFVBQVUsRUFBRSw4QkFBOEI7SUFDMUMsTUFBTSxFQUFFLEVBQUUsRUFBRSwwQkFBMEI7SUFDdEMsUUFBUSxFQUFFLDBCQUEwQjtJQUNwQyxXQUFXLEVBQUUsc0NBQXNDO0lBQ25ELE1BQU0sRUFBRSx3QkFBd0IsRUFBRSxzQ0FBc0M7SUFDeEUsU0FBUyxFQUFFLDZCQUE2QixFQUFFLHlCQUF5QjtJQUNuRSxNQUFNLEVBQUUsa0RBQWtELEVBQUUsc0JBQXNCO0lBQ2xGLEdBQUcsRUFBRSxxQkFBcUIsRUFBRSwwQkFBMEI7Q0FDekQsQ0FBQTtBQUVELE1BQU0sY0FBYyxHQUErQjtJQUMvQyxVQUFVLEVBQUUsNkJBQTZCO0lBQ3pDLE1BQU0sRUFBRSxRQUFRO0lBQ2hCLFFBQVEsRUFBRSxlQUFlO0lBQ3pCLFdBQVcsRUFBRSxjQUFjO0lBQzNCLE1BQU0sRUFBRSxTQUFTLEVBQUUsdUJBQXVCO0lBQzFDLFNBQVMsRUFBRSxtQkFBbUIsRUFBRSwwQkFBMEI7SUFDMUQsTUFBTSxFQUFFLGdCQUFnQixFQUFFLHVCQUF1QjtJQUNqRCxHQUFHLEVBQUUsUUFBUSxDQUFDLDJCQUEyQjtDQUM1QyxDQUFBO0FBQ0Q7Ozs7R0FJRztBQUNILE1BQU0sQ0FBQyxNQUFNLFlBQVksR0FBRyxDQUFDLE9BQXFCLEVBQUUsRUFBRTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUM7SUFDbkMsSUFBSSxDQUFDLE1BQU0sRUFBRSxDQUFDO1FBQ1YsTUFBTSxDQUFDLEtBQUssQ0FDUiw4Q0FBOEM7WUFDOUMsd0VBQXdFLENBQzNFLENBQUM7UUFDRixPQUFPLFNBQVMsQ0FBQztJQUNyQixDQUFDO0lBQ0QsTUFBTSxNQUFNLEdBQWUsQ0FBQyxPQUFPLENBQUMsTUFBTSxJQUFJLFlBQVksQ0FBZSxDQUFDO0lBRTFFLElBQUksTUFBTSxHQUFHLE9BQU8sQ0FBQyxPQUFPLENBQUM7SUFFN0IsSUFBSSxDQUFDLE1BQU0sRUFBRSxDQUFDO1FBQ1YsUUFBUSxNQUFNLEVBQUUsQ0FBQztZQUNiLEtBQUssWUFBWTtnQkFDYixNQUFNLEdBQUcsTUFBTSxFQUFFLFVBQVUsRUFBRSxHQUFHLENBQUM7Z0JBQ2pDLE1BQU07WUFDVixLQUFLLFFBQVE7Z0JBQ1QsTUFBTSxHQUFHLE1BQU0sRUFBRSxNQUFNLEVBQUUsR0FBRyxDQUFDO2dCQUM3QixNQUFNO1lBQ1YsS0FBSyxVQUFVO2dCQUNYLE1BQU0sR0FBRyxNQUFNLEVBQUUsUUFBUSxFQUFFLEdBQUcsQ0FBQztnQkFDL0IsTUFBTTtZQUNWLEtBQUssYUFBYTtnQkFDZCxNQUFNLEdBQUcsTUFBTSxFQUFFLFdBQVcsRUFBRSxHQUFHLENBQUM7Z0JBQ2xDLE1BQU07WUFDVixLQUFLLFFBQVE7Z0JBQ1QseURBQXlEO2dCQUN6RCxNQUFNLEdBQUcsUUFBUSxDQUFDLENBQUMsdUJBQXVCO2dCQUMxQyxNQUFNO1lBQ1YsS0FBSyxXQUFXO2dCQUNaLE1BQU0sR0FBRyxNQUFNLEVBQUUsU0FBUyxFQUFFLEdBQUcsQ0FBQztnQkFDaEMsTUFBTTtZQUNWLEtBQUssUUFBUTtnQkFDVCxNQUFNLEdBQUcsTUFBTSxFQUFFLE1BQU0sRUFBRSxHQUFHLENBQUM7Z0JBQzdCLE1BQU07WUFDVixLQUFLLEtBQUs7Z0JBQ04sTUFBTSxHQUFHLE1BQU0sRUFBRSxHQUFHLEVBQUUsR0FBRyxDQUFDO2dCQUMxQixNQUFNO1FBQ2QsQ0FBQztJQUNMLENBQUM7SUFFRCxJQUFJLENBQUMsTUFBTSxFQUFHLENBQUM7UUFDWCxNQUFNLENBQUMsS0FBSyxDQUFDLE1BQU0sTUFBTSw0RkFBNEYsQ0FBQyxDQUFDO1FBQ3ZILE9BQU8sU0FBUyxDQUFDO0lBQ3JCLENBQUM7SUFFRCxNQUFNLE9BQU8sR0FBRyxPQUFPLENBQUMsT0FBTyxJQUFJLGdCQUFnQixDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBRTNELElBQUksQ0FBQyxPQUFPLENBQUMsS0FBSyxFQUFFLENBQUM7UUFDakIsT0FBTyxDQUFDLEtBQUssR0FBRyxjQUFjLENBQUMsTUFBTSxDQUFDLENBQUE7SUFDMUMsQ0FBQztJQUNELE9BQU8sSUFBSSxNQUFNLENBQUM7UUFDZCxNQUFNO1FBQ04sT0FBTztLQUNWLENBQUMsQ0FBQTtBQUNOLENBQUMsQ0FBQSJ9 // EXTERNAL MODULE: ./node_modules/mime-types/index.js var mime_types = __webpack_require__(86049); ;// ./dist-in/mime-handlers.js @@ -183459,7 +184999,7 @@ function copy_async(from, to, options) { }).catch(reject); }); } - +//# 
sourceMappingURL=data:application/json;base64,{"version":3,"file":"copy.js","sourceRoot":"","sources":["../src/copy.ts"],"names":[],"mappings":"AAAA,OAAO,KAAM,QAAQ,MAAM,MAAM,CAAC;AAClC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,IAAI,CAAC;AACpF,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,QAAQ,CAAC;AAExC,OAAO,EAAE,IAAI,IAAI,UAAU,EAAE,KAAK,IAAI,WAAW,EAAE,MAAM,aAAa,CAAC;AACvE,OAAO,EAAE,MAAM,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAC;AACvD,OAAO,EAAE,iBAAiB,IAAI,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAChE,OAAO,EAAE,IAAI,IAAI,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAChE,OAAO,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AACxE,OAAO,EAAE,IAAI,IAAI,SAAS,EAAE,MAAM,YAAY,CAAC;AAC/C,OAAO,EAAE,oBAAoB,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAWnE,OAAQ,EACP,cAAc,EACd,SAAS,EACT,UAAU,EACV,oBAAoB,EACpB,MAAM,EACN,aAAa,EACb,YAAY,EACZ,QAAQ,EACR,MAAM,iBAAiB,CAAC;AAE1B,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,aAAa,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,KAAK,IAAI,aAAa,EAAE,MAAM,eAAe,CAAC;AACvD,0DAA0D;AAE1D,MAAM,eAAe,GAAG,SAAS,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC;AAC9C,MAAM,gBAAgB,GAAG,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC;AAChD,MAAM,cAAc,GAAG,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC;AAE5C,MAAM,cAAc,GAAG,SAAS,CAAwB,MAAM,CAAC,CAAC;AAEhE,MAAM,mBAAmB,GAAG,OAAO,GAAG,CAAC,CAAC,CAAC,0DAA0D;AAEnG,MAAM,UAAU,aAAa,CAAC,UAAkB,EAAE,IAAY,EAAE,EAAU,EAAE,OAAsB;IACjG,MAAM,eAAe,GAAG,UAAU,GAAG,uBAAuB,CAAC;IAC7D,gBAAgB,CAAC,eAAe,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC5D,gBAAgB,CAAC,eAAe,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxD,eAAe,CAAC,eAAe,EAAE,SAAS,EAAE,OAAO,EAAE;QACpD,SAAS,EAAE,CAAC,SAAS,CAAC;QACtB,QAAQ,EAAE,CAAC,QAAQ,EAAE,iBAAiB,CAAC;QACvC,QAAQ,EAAE,CAAC,UAAU,CAAC;QACtB,OAAO,EAAE,CAAC,UAAU,CAAC;QACrB,aAAa,EAAE,CAAC,UAAU,CAAC;QAC3B,gBAAgB,EAAE,CAAC,UAAU,CAAC;QAC9B,gBAAgB,EAAE,CAAC,QAAQ,CAAC;QAC5B,QAAQ,EAAE,CAAC,QAAQ,CAAC;QACpB,KAAK,EAAE,CAAC,SAAS,CAAC;QAClB,KAAK,EAAE,CAAC,QAAQ,CAAC;KACjB,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,YAAY,GAAG,CAAC,OAAmB,EAAE,IAAY,EAAgB,EAAE;IACxE,MAAM,IAAI,GAAiB,OAAO,IAAI,EAAkB,CAAC;IACzD,MAAM,aAAa,GAAiB,EAAE,CAAC;IACvC,aAAa,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC;IACzC,aAAa,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IACvC,aAAa,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;IACjD,aAAa,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;IACrC,aAAa,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,CAAC;IACvD,aAAa,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,CAAC;IACvD,aAAa,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACjC,aAAa,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IACvC,aAAa,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC;IACnD,aAAa,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC;IACtC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;QAClB,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YACnB,aAAa,CAAC,MAAM,GAAG,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;QACrD,CAAC;aAAM,CAAC;YACP,aAAa,CAAC,MAAM,GAAG,GAAG,EAAE;gBAC3B,OAAO,IAAI,CAAC;YACb,CAAC,CAAC;QACH,CAAC;IACF,CAAC;IACD,OAAO,aAAa,CAAC;AACtB,CAAC,CAAC;AACF,4DAA4D;AAC5D,OAAO;AACP,4DAA4D;AAC5D,MAAM,uBAAuB,GAAG,CAAC,IAAY,EAAE,EAAU,EAAE,UAAwB,EAAE,EAAE,EAAE;IACxF,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC;QACvB,MAAM,eAAe,CAAC,IAAI,CAAC,CAAC;IAC7B,CAAC;IAED,IAAI,UAAU,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,CAAC;QAC1C,MAAM,oBAAoB,CAAC,EAAE,CAAC,CAAC;IAChC,CAAC;AACF,CAAC,CAAC;AAEF,KAAK,UAAU,wBAAwB,CAAC,IAAY,EAAE,EAAU,EAAE,UAAwB,EAAE;IAC3F,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC3B,IAAI,QAAQ,GAAG,KAAK,CAAC;QACrB,IAAI,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACzB,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,MAAM,IAAI,GAAG,CAAC,GAAS,EAAE,EAAE;YAC1B,IAAI,CAAC,QAAQ,EAAE,CAAC;gBACf,QAAQ,GAAG,IAA
I,CAAC;gBAChB,OAAO,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC;QACF,CAAC,CAAC;QACF,MAAM,EAAE,GAAG,gBAAgB,CAAC,IAAI,CAAC;YAChC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;QAExC;;;;;;;;;;;UAWE;QAEF,MAAM,EAAE,GAAG,iBAAiB,CAAC,EAAE,CAAC,CAAC;QACjC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;QAC1C,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACrB,wBAAwB;IACzB,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,YAAY,CAAC,IAAY,EAAE,EAAU,EAAE,IAAY,EAAE,OAAqB;IACxF,IAAI,IAAI,GAAG,YAAY,CAAC,IAAI,CAAC,CAAC;IAC9B,MAAM,YAAY,GAAkB;QACnC,IAAI,EAAE,IAAI;KACV,CAAC;IAEF,IAAI,OAAO,CAAC,cAAc,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAChD,IAAI,MAAM,EAAE,CAAC;YACZ,EAAE,GAAG,MAAM,CAAC;QACb,CAAC;IACF,CAAC;IAED,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;QACrB,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;IACtD,CAAC;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;QACtC,MAAM,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC;IACnD,CAAC;SAAM,CAAC;QACP,SAAS,CAAC,EAAE,EAAE,IAAI,EAAE,YAAY,CAAC,CAAC;IACnC,CAAC;AACF,CAAC;AACD,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,EAAU,EAAE,EAAE;IACpD,MAAM,eAAe,GAAG,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;IAC9C,IAAI,CAAC;QACJ,WAAW,CAAC,eAAe,EAAE,EAAE,CAAC,CAAC;IAClC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,wEAAwE;QACxE,kFAAkF;QAClF,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;YAClB,WAAW;YACX,EAAE,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,CAAC,CAAC;QACrC,CAAC;aAAM,CAAC;YACP,MAAM,GAAG,CAAC;QACX,CAAC;IACF,CAAC;AACF,CAAC,CAAC;AAEF,KAAK,UAAU,YAAY,CAAC,IAAY,EAAE,WAAkB,EAAE,EAAU,EAAE,OAAqB;IAC9F,MAAM,IAAI,GAAW,QAAQ,CAAC,WAAW,CAAC,IAAc,CAAC,CAAC;IAC1D,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,EAAE,CAAC;QACxC,IAAI,OAAO,CAAC,cAAc,EAAE,CAAC;YAC5B,MAAM,MAAM,GAAG,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;YAChD,IAAI,MAAM,EAAE,CAAC;gBACZ,EAAE,GAAG,MAAM,CAAA;YACZ,CAAC;QACF,CAAC;QACD,MAAM,CAAC,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC;IACnD,CAAC;SAAM,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,IAAI,EAAE,CAAC;QAChD,MAAM,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;IAC5C,CAAC;SAAM,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,OAAO,EAAE,CAAC;QACnD,IAAI,OAAO,CAAC,cAAc,EAAE,CAAC;YAC5B,MAAM,MAAM,GAAG,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,EAAE,CAAC,CAAA;YAC/C,IAAI,MAAM,EAAE,CAAC;gBACZ,EAAE,GAAG,MAAM,CAAA;YACZ,CAAC;QACF,CAAC;QACD,eAAe,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;IAC3B,CAAC;AACF,CAAC;AACD,MAAM,UAAU,IAAI,CAAC,IAAY,EAAE,EAAU,EAAE,OAAsB;IACpE,MAAM,IAAI,GAAG,YAAY,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;IACxC,uBAAuB,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,CAAA;IACvC,MAAM,KAAK,GAAsB,EAAE,CAAA;IACnC,IAAI,SAAS,GAAG,CAAC,CAAA;IACjB,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,KAAK,EAAE,CAAC;QACjD,MAAM,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAA;QAC/B,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;YAC3B,MAAM,CAAC,EAAE,CAAC,CAAA;QACX,CAAC;IACF,CAAC;IAED,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,WAAkB,EAAE,EAAE;QACpD,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC;YACvB,KAAK,CAAC,IAAI,CAAC;gBACV,IAAI,EAAE,IAAI;gBACV,IAAI,EAAE,WAAW;gBACjB,GAAG,EAAE,QAAQ,CAAC,OAAO,CAAC,EAAE,EAAE,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;aACxD,CAAC,CAAC;YACH,SAAS,IAAI,WAAW,CAAC,IAAI,CAAC;QAC/B,CAAC;IACF,CAAC,CAAC;IAEF,cAAc,CAAC,IAAI,EAAE;QACpB,cAAc,EAAE;YACf,IAAI,EAAE,IAAI;YACV,QAAQ,EAAE,IAAI;SACd;KACD,EAAE,OAAO,CAAC,CAAC;IAEZ,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,OAAO,EAAE,EAAE;QAC3B,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,EAAE,OAAO,CAAC,CAAA;QACrD,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YACnB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,EA
AE,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;QACrE,CAAC;IACF,CAAC,CAAC,CAAA;AACH,CAAC;AAED,4DAA4D;AAC5D,QAAQ;AACR,4DAA4D;AAE5D;;;;;;;GAOG;AACH,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAU,EAAE,IAAkB,EAAoC,EAAE;IACrG,OAAO,WAAW,CAAC,IAAI,CAAC;SACtB,IAAI,CAAC,aAAa,CAAC,EAAE;QACrB,IAAI,CAAC,aAAa,EAAE,CAAC;YACpB,MAAM,eAAe,CAAC,IAAI,CAAC,CAAC;QAC7B,CAAC;aAAM,CAAC;YACP,OAAO,WAAW,CAAC,EAAE,CAAC,CAAC;QACxB,CAAC;IACF,CAAC,CAAC;SACD,IAAI,CAAC,cAAc,CAAC,EAAE;QACtB,IAAI,cAAc,EAAE,CAAC;YACpB,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;gBAC3B,OAAO,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;YAC/C,CAAC;YACD,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;gBAC3B,MAAM,OAAO,GAAG,IAAI,CAAC,gBAAgB,CAAC,EAAE,EAAE,UAAU,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;gBACzE,OAAO,CAAC,IAAI,CAAC,CAAC,QAA2B,EAAE,EAAE;oBAC5C,QAAQ,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC;gBAChC,CAAC,CAAC,CAAC;gBACH,OAAO,OAAO,CAAC;YAChB,CAAC;YACD,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACrB,MAAM,oBAAoB,CAAC,EAAE,CAAC,CAAC;YAChC,CAAC;QACF,CAAC;IACF,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF,MAAM,aAAa,GAAG,CAAC,IAAY,EAAE,EAAU,EAAE,IAAS,EAAE,OAAsB,EAAE,cAAwB,EAAE,EAAE;IAC/G,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,MAAM,UAAU,GAAG,EAAE,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;QAC7C,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,UAAU,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC/B,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAmB,EAAE,EAAE;YAC/C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;YACvC,yDAAyD;YACzD,oCAAoC;YACpC,UAAU,CAAC,MAAM,EAAE,CAAC;YACpB,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,QAAQ,IAAI,cAAc,KAAK,SAAS,EAAE,CAAC;gBAClE,4DAA4D;gBAC5D,cAAc,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE;oBACzC,gEAAgE;oBAChE,uEAAuE;oBACvE,iCAAiC;oBACjC,aAAa,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;yBACvC,IAAI,CAAC,OAAO,CAAC;yBACb,KAAK,CAAC,MAAM,CAAC,CAAC;gBACjB,CAAC,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;QACF,CAAC,CAAC,CAAC;QAEH,WAAW,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE;YAC7B,0BAA0B;YAC1B,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,cAAc,EAAE,CAAC;gBAC1D,MAAM,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;gBACrC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,GAAG,EAAE,CAAC,GAAmB,EAAE,EAAU,EAAE,EAAE;oBACpD,IAAI,GAAG,EAAE,CAAC;wBACT,MAAM,GAAG,CAAC;oBACX,CAAC;oBACD,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,UAAU,CAAC,KAAK,EAAE,UAAU,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,EAAE;wBAC3D,IAAI,IAAI,EAAE,CAAC;4BACV,MAAM,IAAI,CAAC;wBACZ,CAAC;wBACD,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,IAAI,CAAC,CAAC;wBACnB,OAAO,CAAC,CAAC,CAAC,CAAC;oBACZ,CAAC,CAAC,CAAC;gBACJ,CAAC,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,OAAO,CAAC,CAAC,CAAC,CAAC;YACZ,CAAC;QACF,CAAC,CAAC,CAAC;QAEH,MAAM,IAAI,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC;QACpC,IAAI,cAAc,GAAG,IAAI,CAAC;QAC1B,IAAI,OAAO,IAAI,OAAO,CAAC,aAAa,IAAI,IAAI,GAAG,mBAAmB,EAAE,CAAC;YACpE;;;;iBAIK;YACL,IAAI,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YACzB,IAAI,KAAK,GAAG,CAAC,CAAC;YACd,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YAC3B,cAAc,CAAC,EAAE,CAAC,UAAU,EAAE,CAAC,CAAM,EAAE,EAAE;gBACxC,OAAO,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,OAAO,CAAC,GAAG,IAAI,CAAC;gBACxC,KAAK,GAAG,CAAC,CAAC,WAAW,GAAG,OAAO,CAAC;gBAChC,OAAO,CAAC,aAAa,CAAC,IAAI,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC;gBACrD,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;oBACnB,OAAO,CAAC,GAAG,CAAC,QAAQ,GAAG,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC,WAAW,GAAG,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;gBACzE,CAAC;YACF,CAAC,CAAC,CAAC;YACH,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACnD,CAAC;aAAM,CAAC;YACP,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;gBAC9B,OAAO,CAAC,GAAG,CAAC,QAAQ,GAAG,IAAI,GAAG,MAAM,GAA
G,EAAE,CAAC,CAAC;YAC5C,CAAC;YACD,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAC9B,CAAC;IACF,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC;AACF,MAAM,UAAU,gBAAgB,CAAC,IAAY,EAAE,EAAU;IACxD,OAAO,gBAAgB,CAAC,IAAI,CAAC;SAC3B,IAAI,CAAC,CAAC,eAAuB,EAAE,EAAE;QACjC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,eAAe,CAAC,eAAe,EAAE,EAAE,EAAE,IAAI,CAAC;iBACxC,IAAI,CAAC,OAAO,CAAC;iBACb,KAAK,CAAC,CAAC,GAAmB,EAAE,EAAE;gBAC9B,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,wEAAwE;oBACxE,kFAAkF;oBAClF,cAAc,CAAC,EAAE,EAAE,IAAI,CAAC;wBACvB,WAAW;yBACV,IAAI,CAAC,GAAG,EAAE;wBACV,OAAO,eAAe,CAAC,eAAe,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;oBACnD,CAAC,CAAC;yBACD,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;gBACzB,CAAC;qBAAM,CAAC;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;gBACb,CAAC;YACF,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,aAAa,GAAG,CAAC,IAAY,EAAE,WAAkB,EAAE,EAAU,EAAE,OAAqB,EAAgB,EAAE;IAC3G,MAAM,IAAI,GAAG,QAAQ,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACvC,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,EAAE,CAAC;QACxC,OAAO,cAAc,CAAC,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAA;IAC1C,CAAC;SAAM,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,IAAI,EAAE,CAAC;QAChD,OAAO,aAAa,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;SAAM,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,OAAO,EAAE,CAAC;QACnD,OAAO,gBAAgB,CAAC,IAAI,EAAE,EAAE,CAAC,CAAA;IAClC,CAAC;IACD,yBAAyB;IACzB,OAAO,OAAO,CAAC,OAAO,EAAE,CAAA;AACzB,CAAC,CAAA;AACD,8DAA8D;AAC9D,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAU,EAAE,OAAqB,EAAE,QAA2B,EAA4B,EAAE;IAC7H,QAAQ,QAAQ,CAAC,SAAS,EAAE,CAAC;QAC5B,KAAK,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC;YACzB,MAAM,oBAAoB,CAAC,EAAE,CAAC,CAAC;QAChC,CAAC;QACD,KAAK,YAAY,CAAC,SAAS,CAAC;QAC5B,KAAK,YAAY,CAAC,MAAM,CAAC;QACzB,KAAK,YAAY,CAAC,QAAQ,CAAC;QAC3B,KAAK,YAAY,CAAC,KAAK,CAAC;QACxB,KAAK,YAAY,CAAC,eAAe,CAAC;QAClC,KAAK,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC;YACxB,OAAO,QAAQ,CAAC,SAAS,CAAC;QAC3B,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACT,OAAO,SAAS,CAAC;QAClB,CAAC;IACF,CAAC;AACF,CAAC,CAAC;AAEF,MAAM,UAAU,eAAe,CAAC,IAAY,EAAE,EAAU,EAAE,OAAqB,EAAE,WAAyB;IACzG,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;QAC/B,OAAO,IAAI,CAAC;IACb,CAAC;IACD,MAAM,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;IAC7B,MAAM,GAAG,GAAG,UAAU,CAAC,EAAE,CAAC,CAAC;IAC3B,IAAI,WAAW,KAAK,YAAY,CAAC,IAAI,EAAE,CAAC;QACvC,OAAO,KAAK,CAAC;IACd,CAAC;SAAM,IAAI,WAAW,KAAK,YAAY,CAAC,QAAQ,EAAE,CAAC;QAClD,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,EAAE,CAAC;YAC9D,OAAO,IAAI,CAAC;QACb,CAAC;QACD,IAAI,GAAG,CAAC,UAAU,CAAC,OAAO,EAAE,GAAG,GAAG,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC;YACzD,OAAO,KAAK,CAAC;QACd,CAAC;IACF,CAAC;SAAM,IAAI,WAAW,KAAK,YAAY,CAAC,eAAe,EAAE,CAAC;QACzD,2FAA2F;QAC3F,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,GAAG,EAAE,CAAC;YAC9D,OAAO,IAAI,CAAC;QACb,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,KAAK,SAAS,CAAC,IAAI,EAAE,CAAC;YACvE,IAAI,GAAG,CAAC,IAAI,KAAK,GAAG,CAAC,IAAI,EAAE,CAAC;gBAC3B,OAAO,KAAK,CAAC;YACd,CAAC;QACF,CAAC;IACF,CAAC;SAAM,IAAI,WAAW,KAAK,YAAY,CAAC,SAAS,EAAE,CAAC;QACnD,OAAO,IAAI,CAAC;IACb,CAAC;SAAM,IAAI,WAAW,KAAK,YAAY,CAAC,KAAK,EAAE,CAAC;QAC/C,OAAO,KAAK,CAAC;IACd,CAAC;AACF,CAAC;AAED,SAAS,MAAM,CAAC,KAAwB;IACvC,IAAI,IAAI,GAAG,IAAI,CAAC;IAChB,KAAK,CAAC,OAAO,CAAC,CAAC,OAAwB,EAAE,EAAE;QAC1C,IAAI,OAAO,CAAC,MAAM,KAAK,oBAAoB,CAAC,IAAI,EAAE,CAAC;YAClD,IAAI,GAAG,KAAK,CAAC;QACd,CAAC;IACF,CAAC,CAAC,CAAC;IACH,OAAO,IAAI,CAAC;AACb,CAAC;AACD;;;;;;;;GAQG;AACH,KAAK,UAAU,OAAO,CAAC,IAAY,EAAE,EAAU,EAAE,IAAkB,EAAE,IAAqB;IACzF,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;IAC7B,IAAI,GAAW,CAAC;IAChB,IAAI,QAAgB,CAAC;IACrB,IAAI,CAAC,IAAI,EAAE,CAAC;QACX,OAAO;IACR,CAAC;IACD,GAAG,GAAG,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,
IAAI,CAAC,IAAI,CAAC,CAAC;IACzC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;IAErC,IAAI,CAAC,MAAM,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAC9C,MAAM,IAAI,GAAG,GAAG,EAAE;QACjB,IAAI,CAAC,MAAM,GAAG,oBAAoB,CAAC,IAAI,CAAC;QACxC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;YACxB,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,CAAC;IACF,CAAC,CAAC;IACF,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QACxB,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC;IACD,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;IAC1B,wCAAwC;IACxC,MAAM,OAAO,GAAG,CAAC,kBAAqC,EAAE,EAAE;QACzD,IAAI,CAAC,MAAM,GAAG,oBAAoB,CAAC,OAAO,CAAC;QAC3C,mBAAmB;QACnB,IAAI,kBAAkB,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;YACxF,IAAI,CAAC,MAAwB,CAAC,IAAI,CAAC;gBACnC,KAAK,EAAE,kBAAkB,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI;gBACV,QAAQ,EAAE,kBAAkB;aACb,CAAC,CAAC;QACnB,CAAC;QACD,IAAI,kBAAkB,EAAE,CAAC;YACxB,gFAAgF;YAChF,wCAAwC;YACxC,MAAM,MAAM,GAAG,kBAAkB,CAAC,IAAI,KAAK,QAAQ,CAAC,MAAM,CAAC;YAC3D,IAAI,MAAM,EAAE,CAAC;gBACZ,OAAO,CAAC,gBAAgB,GAAG,kBAAkB,CAAC;YAC/C,CAAC;YACD,IAAI,aAAa,GAAG,kBAAkB,CAAC,SAAS,CAAC;YACjD,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,kBAAkB,CAAiB,CAAA;YAE5F,IAAI,aAAa,KAAK,YAAY,CAAC,KAAK,EAAE,CAAC;gBAC1C,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;YACnB,CAAC;YACD,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;gBAChB,OAAO;YACR,CAAC;YAED,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,aAAa,CAAC,EAAE,CAAC;gBACnE,IAAI,EAAE,CAAC;gBACP,OAAO;YACR,CAAC;QAEF,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,oBAAoB,CAAC,OAAO,CAAC;QAC3C,aAAa,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE;YAChE,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;YAC1B,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;gBACtB,IAAI,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,eAAe,EAAE,IAAI,CAAC,IAAI,CAAC,KAAK,KAAK,EAAE,CAAC;oBAClG,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;oBAClB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAC;gBACvB,CAAC;YACF,CAAC;YACD,IAAI,EAAE,CAAC;QACR,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAmB,EAAE,EAAE;YAChC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE,CAAC;gBACzC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,UAAU,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,QAAQ,EAAE,CAAC;oBACpE,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,oBAAuC,EAAE,EAAE;wBACpH,2EAA2E;wBAC3E,IAAI,IAAI,CAAC,0BAA0B,EAAE,CAAC;4BACrC,oBAAoB,GAAG,IAAI,CAAC,0BAA0B,CAAC;wBACxD,CAAC;wBACD,qFAAqF;wBACrF,IAAI,oBAAoB,CAAC,IAAI,KAAK,QAAQ,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,0BAA0B,EAAE,CAAC;4BACvF,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC;wBACxD,CAAC;wBAED,IAAI,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,KAAK,EAAE,CAAC;4BAC3D,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;4BAClB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAC;wBACvB,CAAC;wBACD,IAAI,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,KAAK,EAAE,CAAC;4BAC3D,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;4BAClB,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;wBACzB,CAAC;wBACD,IAAI,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,IAAI,EAAE,CAAC;4BAC1D,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;wBAC3B,CAAC;wBAED,8CAA8C;wBAC9C,IAAI,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,QAAQ;4BAC3D,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,eAAe;4BAC/D,oBAAoB,CAAC,SAAS,KAAK,YAAY,CAAC,SAAS,EAAE,CAAC;4BAC5D,IAAI,CAAC,MAAM,CAAC,IAAI,cAAc,CAAC,4DAA4D,GAAG,oBAAoB,CAAC,SAAS,CAAC,CAAC,CAAC;wBAChI,CAAC;oBACF,CAAC,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAClB,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;IACF,OAAO,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AAC/D,CAAC;AAED,SAAS,IAAI,CAAC,KAAwB;IACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACvC,IAAI,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK
,oBAAoB,CAAC,SAAS,EAAE,CAAC;YACxD,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;QACjB,CAAC;IACF,CAAC;IACD,OAAO,IAAI,CAAC;AACb,CAAC;AAcD;;;;;;;GAOG;AACH,MAAM,UAAU,KAAK,CAAC,IAAY,EAAE,EAAU,EAAE,OAAsB;IACrE,OAAO,GAAG,YAAY,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;IACtC,OAAO,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACnD,UAAU,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA2B,EAAE,EAAE;YAClE,IAAI,CAAC,QAAQ,EAAE,CAAC;gBACf,QAAQ,GAAG,OAAO,CAAC,gBAAgB,IAAI;oBACtC,IAAI,EAAE,QAAQ,CAAC,IAAI;oBACnB,SAAS,EAAE,YAAY,CAAC,SAAS;iBACjC,CAAC;YACH,CAAC;iBAAM,CAAC;gBACP,IAAI,QAAQ,CAAC,IAAI,KAAK,QAAQ,CAAC,MAAM,EAAE,CAAC;oBACvC,OAAO,CAAC,gBAAgB,GAAG,QAAQ,CAAC;gBACrC,CAAC;YACF,CAAC;YACD,IAAI,aAAa,GAAG,QAAQ,CAAC,SAAS,CAAC;YACvC,IAAI,MAAM,GAAgB,KAAK,CAAC,CAAC;YAEjC,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;gBACnE,MAAM,GAAG,EAAE,CAAC;YACb,CAAC;YAED,+CAA+C;YAC/C,aAAa,GAAG,UAAU,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAiB,CAAC;YAExE,+DAA+D;YAC/D,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,EAAE,aAAa,CAAC,EAAE,CAAC;gBAC/F,OAAO,OAAO,EAAE,CAAC;YAClB,CAAC;YACD,wBAAwB;YACxB,IAAI,OAAO,IAAI,OAAO,CAAC,KAAsC,EAAE,CAAC;gBAC/D,MAAM,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;gBAChC,IAAI,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;oBAC3B,MAAM,CAAC,EAAE,CAAC,CAAC;gBACZ,CAAC;YACF,CAAC;YACD,mBAAmB;YACnB,MAAM,WAAW,GAAiB;gBACjC,OAAO,EAAE,OAAO;gBAChB,MAAM,EAAE,MAAM;gBACd,KAAK,EAAE,KAAK;gBACZ,eAAe,EAAE,CAAC;gBAClB,eAAe,EAAE,QAAQ;gBACzB,OAAO,EAAE,OAAO;gBAChB,MAAM,EAAE,MAAM;gBACd,KAAK,EAAE,EAAE;gBACT,0BAA0B,EAAE,IAAI;aAChC,CAAC;YACF,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;YAChC,2EAA2E;YAC3E,MAAM,OAAO,GAAG;gBACf,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;gBAC1B,IAAI,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC;oBACnB,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC;gBACxB,CAAC;gBACD,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;oBAClB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;oBACzB,IAAI,IAAI,EAAE,CAAC;wBACV,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,GAAa,EAAE,WAAW,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACzE,CAAC;gBACF,CAAC;YACF,CAAC,CAAC;YAEF,IAAI,KAAK,GAAkB,aAAa,CAAC,IAAI,CAAC;YAC9C,IAAI,OAAO,IAAI,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,EAAE,CAAC;gBAC5E,KAAK,IAAI,aAAa,CAAC,QAAQ,CAAC;YACjC,CAAC;YACD,aAAa,CAAC,IAAI,EAAE;gBACnB,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,KAAK,EAAE,KAAK;aACZ,CAAC,CAAC,IAAI,CAAC,CAAC,EAAO,EAAE,EAAE;gBACnB,IAAI,IAAqB,CAAC;gBAC1B,OAAO,IAAI,GAAG,EAAE,CAAC,IAAI,EAAS,EAAE,CAAC;oBAChC,KAAK,CAAC,IAAI,CAAC;wBACV,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,GAAG,EAAE,QAAQ,CAAC,OAAO,CAAC,EAAE,EAAE,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;wBAC7D,MAAM,EAAE,oBAAoB,CAAC,SAAS;qBACtC,CAAC,CAAC;gBACJ,CAAC;gBACD,OAAO,EAAE,CAAC;YACX,CAAC,CAAC,CAAC;QACJ,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;AACJ,CAAC"} ;// ../fs/dist/move.js @@ -183567,7 +185107,7 @@ const move_async = (from, to) => { }); }); }; - +//# 
sourceMappingURL=data:application/json;base64,… ;// ../fs/dist/rename.js @@ -183589,7 +185129,7 @@ function rename_sync(path, newName) { function rename_async(path, newName) { return moveASync(path, pathUtil.join(pathUtil.dirname(path), newName)); } - +//# sourceMappingURL=data:application/json;base64,… ;// ../ai-tools/dist/constants.js const LOGGER_NAME = 'llm-tools-cli'; const constants_EXCLUDE_GLOB = [ @@ -206374,7 +207914,7 @@ const runTools = async (client, params, options) => { const ret = content(result); return await onCompletion(ret, options); }; -//# 
sourceMappingURL=data:application/json;base64,… +//# sourceMappingURL=data:application/json;base64,… ;// ./dist-in/commands/run-assistant.js @@ -211690,7 +213230,7 @@ function listModelsAsStrings(models) { return `${model.id} | ${isFree ? 
'free' : 'paid'}`; }); } -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9tb2RlbHMvb3BlbnJvdXRlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUM7QUFFbEMsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQTRCcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLFVBQVUsQ0FBQztBQUN6QyxpSEFBaUg7QUFDakgsTUFBTSxTQUFTLEdBQUcsYUFBYSxDQUFDLElBQUksR0FBRyxDQUFDLEdBQUcsRUFBRSxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7QUFDL0QsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDLEdBQUcsRUFBRSxNQUFNLEVBQUUsd0JBQXdCLENBQUMsQ0FBQyxDQUFBO0FBQzlHLE1BQU0sY0FBYyxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxHQUFHLElBQUksQ0FBQyxDQUFDLDJCQUEyQjtBQUV2RSxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsQ0FBQyxZQUFvQixVQUFVLEVBQXFDLEVBQUU7SUFDakcsSUFBSSxDQUFDO1FBQ0gsSUFBSSxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsRUFBRSxDQUFDO1lBQ3ZCLE9BQU8sSUFBSSxDQUFBO1FBQ2IsQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFHLElBQUksQ0FBQyxTQUFTLEVBQUUsTUFBTSxDQUFpQixDQUFDO1FBQzFELE1BQU0sR0FBRyxHQUFHLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUN2QixJQUFJLEdBQUcsR0FBRyxTQUFTLENBQUMsU0FBUyxHQUFHLGNBQWMsRUFBRSxDQUFDO1lBQy9DLGFBQWE7UUFDZixDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsTUFBYSxDQUFDO0lBQ2pDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUNoRCxPQUFPLElBQUksQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFDRCxTQUFTLFlBQVksQ0FBQyxNQUF5QixFQUFFLFlBQW9CLFVBQVU7SUFDN0UsTUFBTSxTQUFTLEdBQWlCO1FBQzlCLFNBQVMsRUFBRSxJQUFJLENBQUMsR0FBRyxFQUFFO1FBQ3JCLE1BQU07S0FDUCxDQUFDO0lBQ0YsS0FBSyxDQUFDLFNBQVMsRUFBRSxTQUFTLENBQUMsQ0FBQTtBQUM3QixDQUFDO0FBRUQsTUFBTSxDQUFDLE1BQU0scUJBQXFCLEdBQUcsS0FBSyxFQUFFLFlBQW9CLFVBQVUsRUFBOEIsRUFBRTtJQUN4RyxJQUFJLENBQUM7UUFDSCx5Q0FBeUM7UUFDekMsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxDQUM5QixxQ0FBcUMsRUFDckM7WUFDRSxNQUFNLEVBQUU7WUFDTixnQ0FBZ0M7YUFDakM7U0FDRixDQUNGLENBQUE7UUFDRCxZQUFZLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsQ0FBQTtRQUNoQyxNQUFNLENBQUMsS0FBSyxDQUFDLFdBQVcsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsTUFBTSx5QkFBeUIsU0FBUyxFQUFFLENBQUMsQ0FBQTtRQUN0RixPQUFPLFFBQVEsQ0FBQyxJQUFJLENBQUMsSUFBYSxDQUFBO0lBQ3BDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQyxtQ0FBbUMsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUN4RCxNQUFNLEtBQUssQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFFRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE1BQXlCO0lBQ3RELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FDbEIsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUNSLEtBQUssQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUM7UUFDMUIsS0FBSyxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQztRQUM5QixLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQzVCLENBQUM7QUFDSixDQUFDO0FBQ0QsTUFBTSxVQUFVLG9CQUFvQixDQUFDLE1BQXlCO0lBQzVELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQyxDQUFDLEtBQUssRUFBRSxFQUFFLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxNQUFNLEtBQUssQ0FBQyxDQUFDLENBQUM7QUFDOUQsQ0FBQztBQUVELE1BQU0sVUFBVSx3QkFBd0IsQ0FBQyxNQUF5QjtJQUNoRSxPQUFPLE1BQU0sQ0FBQyxNQUFNLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsVUFBVSxLQUFLLENBQUMsQ0FBQyxDQUFDO0FBQ2xFLENBQUM7QUFFRCxNQUFNLFVBQVUsbUJBQW1CLENBQUMsTUFBeUI7SUFDM0QsT0FBTyxNQUFNLENBQUMsTUFBTSxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQUMsQ0FBQztBQUM3RCxDQUFDO0FBQ0QsTUF
BTSxVQUFVLG1CQUFtQixDQUFDLE1BQXlCO0lBQzNELE1BQU0sR0FBRyxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsRUFBRSxFQUFFLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxhQUFhLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUE7SUFDNUQsT0FBTyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUU7UUFDMUIsTUFBTSxNQUFNLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxRQUFRLENBQUMsTUFBTSxDQUFDLENBQUE7UUFDMUMsT0FBTyxHQUFHLEtBQUssQ0FBQyxFQUFFLE1BQU0sTUFBTSxDQUFDLENBQUMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFBO0lBQ3BELENBQUMsQ0FBQyxDQUFBO0FBQ0osQ0FBQyJ9 +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9tb2RlbHMvb3BlbnJvdXRlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUM7QUFFbEMsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQXlDcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLFVBQVUsQ0FBQztBQUN6QyxpSEFBaUg7QUFDakgsTUFBTSxTQUFTLEdBQUcsYUFBYSxDQUFDLElBQUksR0FBRyxDQUFDLEdBQUcsRUFBRSxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7QUFDL0QsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDLEdBQUcsRUFBRSxNQUFNLEVBQUUsd0JBQXdCLENBQUMsQ0FBQyxDQUFBO0FBQzlHLE1BQU0sY0FBYyxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxHQUFHLElBQUksQ0FBQyxDQUFDLDJCQUEyQjtBQUV2RSxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsQ0FBQyxZQUFvQixVQUFVLEVBQXFDLEVBQUU7SUFDakcsSUFBSSxDQUFDO1FBQ0gsSUFBSSxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsRUFBRSxDQUFDO1lBQ3ZCLE9BQU8sSUFBSSxDQUFBO1FBQ2IsQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFHLElBQUksQ0FBQyxTQUFTLEVBQUUsTUFBTSxDQUFpQixDQUFDO1FBQzFELE1BQU0sR0FBRyxHQUFHLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUN2QixJQUFJLEdBQUcsR0FBRyxTQUFTLENBQUMsU0FBUyxHQUFHLGNBQWMsRUFBRSxDQUFDO1lBQy9DLGFBQWE7UUFDZixDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsTUFBYSxDQUFDO0lBQ2pDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUNoRCxPQUFPLElBQUksQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFDRCxTQUFTLFlBQVksQ0FBQyxNQUF5QixFQUFFLFlBQW9CLFVBQVU7SUFDN0UsTUFBTSxTQUFTLEdBQWlCO1FBQzlCLFNBQVMsRUFBRSxJQUFJLENBQUMsR0FBRyxFQUFFO1FBQ3JCLE1BQU07S0FDUCxDQUFDO0lBQ0YsS0FBSyxDQUFDLFNBQVMsRUFBRSxTQUFTLENBQUMsQ0FBQTtBQUM3QixDQUFDO0FBRUQsTUFBTSxDQUFDLE1BQU0scUJBQXFCLEdBQUcsS0FBSyxFQUFFLFlBQW9CLFVBQVUsRUFBOEIsRUFBRTtJQUN4RyxJQUFJLENBQUM7UUFDSCx5Q0FBeUM7UUFDekMsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsR0FBRyxDQUM5QixxQ0FBcUMsRUFDckM7WUFDRSxNQUFNLEVBQUU7WUFDTixnQ0FBZ0M7YUFDakM7U0FDRixDQUNGLENBQUE7UUFDRCxZQUFZLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsQ0FBQTtRQUNoQyxNQUFNLENBQUMsS0FBSyxDQUFDLFdBQVcsUUFBUSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsTUFBTSx5QkFBeUIsU0FBUyxFQUFFLENBQUMsQ0FBQTtRQUN0RixPQUFPLFFBQVEsQ0FBQyxJQUFJLENBQUMsSUFBYSxDQUFBO0lBQ3BDLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2YsTUFBTSxDQUFDLEtBQUssQ0FBQyxtQ0FBbUMsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUN4RCxNQUFNLEtBQUssQ0FBQztJQUNkLENBQUM7QUFDSCxDQUFDLENBQUE7QUFFRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE1BQXlCO0lBQ3RELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FDbEIsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUNSLEtBQUssQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUM7UUFDMUIsS0FBSyxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQztRQUM5QixLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQzVCLENBQUM7QUFDSixDQUFDO0FBQ0QsTUFBTSxVQUFVLG9CQUFvQixDQUFDLE1BQXlCO0lBQzVELE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQy
xDQUFDLEtBQUssRUFBRSxFQUFFLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxNQUFNLEtBQUssQ0FBQyxDQUFDLENBQUM7QUFDOUQsQ0FBQztBQUVELE1BQU0sVUFBVSx3QkFBd0IsQ0FBQyxNQUF5QjtJQUNoRSxPQUFPLE1BQU0sQ0FBQyxNQUFNLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsVUFBVSxLQUFLLENBQUMsQ0FBQyxDQUFDO0FBQ2xFLENBQUM7QUFFRCxNQUFNLFVBQVUsbUJBQW1CLENBQUMsTUFBeUI7SUFDM0QsT0FBTyxNQUFNLENBQUMsTUFBTSxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLENBQUMsQ0FBQztBQUM3RCxDQUFDO0FBQ0QsTUFBTSxVQUFVLG1CQUFtQixDQUFDLE1BQXlCO0lBQzNELE1BQU0sR0FBRyxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsRUFBRSxFQUFFLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxhQUFhLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUE7SUFDNUQsT0FBTyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUMsS0FBSyxFQUFFLEVBQUU7UUFDMUIsTUFBTSxNQUFNLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQyxRQUFRLENBQUMsTUFBTSxDQUFDLENBQUE7UUFDMUMsT0FBTyxHQUFHLEtBQUssQ0FBQyxFQUFFLE1BQU0sTUFBTSxDQUFDLENBQUMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFBO0lBQ3BELENBQUMsQ0FBQyxDQUFBO0FBQ0osQ0FBQyJ9 ;// ./dist-in/models/openai.js @@ -211752,8 +213292,8 @@ function openai_listModelsAsStrings(models) { } //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL21vZGVscy9vcGVuYWkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxNQUFNLE1BQU0sUUFBUSxDQUFBO0FBQzNCLE9BQU8sS0FBSyxJQUFJLE1BQU0sV0FBVyxDQUFBO0FBQ2pDLE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDcEQsT0FBTyxFQUFFLElBQUksSUFBSSxJQUFJLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUNoRCxPQUFPLEVBQUUsSUFBSSxJQUFJLEtBQUssRUFBRSxNQUFNLG9CQUFvQixDQUFBO0FBRWxELE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFhcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLFVBQVUsQ0FBQztBQUN6QyxpSEFBaUg7QUFDakgsTUFBTSxTQUFTLEdBQUcsYUFBYSxDQUFDLElBQUksR0FBRyxDQUFDLEdBQUcsRUFBRSxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsQ0FBQyxDQUFDLENBQUM7QUFDL0QsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDLEdBQUcsRUFBRSxNQUFNLEVBQUUsb0JBQW9CLENBQUMsQ0FBQyxDQUFBO0FBRTFHLE1BQU0sY0FBYyxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxHQUFHLElBQUksQ0FBQTtBQUUxQyxLQUFLLFVBQVUsYUFBYSxDQUFDLFlBQW9CLFVBQVU7SUFDdkQsSUFBSSxDQUFDO1FBQ0QsSUFBSSxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsRUFBRSxDQUFDO1lBQ3JCLE9BQU8sSUFBSSxDQUFBO1FBQ2YsQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFHLElBQUksQ0FBQyxTQUFTLENBQWlCLENBQUE7UUFDakQsTUFBTSxHQUFHLEdBQUcsSUFBSSxDQUFDLEdBQUcsRUFBRSxDQUFBO1FBQ3RCLElBQUksR0FBRyxHQUFHLFNBQVMsQ0FBQyxTQUFTLEdBQUcsY0FBYyxFQUFFLENBQUM7WUFDN0MsYUFBYTtRQUNqQixDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsTUFBTSxDQUFBO0lBQzNCLENBQUM7SUFBQyxPQUFPLEtBQUssRUFBRSxDQUFDO1FBQ2IsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxLQUFLLENBQUMsQ0FBQTtRQUNoRCxPQUFPLElBQUksQ0FBQTtJQUNmLENBQUM7QUFDTCxDQUFDO0FBQ0QsU0FBUyxZQUFZLENBQUMsTUFBcUIsRUFBRSxZQUFvQixVQUFVO0lBQ3ZFLElBQUksQ0FBQztRQUNELE1BQU0sU0FBUyxHQUFpQjtZQUM1QixTQUFTLEVBQUUsSUFBSSxDQUFDLEdBQUcsRUFBRTtZQUNyQixNQUFNO1NBQ1QsQ0FBQTtRQUNELEtBQUssQ0FBQyxTQUFTLEVBQUUsU0FBUyxDQUFDLENBQUE7SUFDL0IsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDYixNQUFNLENBQUMsS0FBSyxDQUFDLHlCQUF5QixFQUFFLEtBQUssQ0FBQyxDQUFBO0lBQ2xELENBQUM7QUFDTCxDQUFDO0FBQ0QsTUFBTSxDQUFDLEtBQUssVUFBVSxpQkFBaUIsQ0FBQyxNQUFjLEVBQUUsWUFBb0IsVUFBVTtJQUNsRixJQUFJLENBQUM7UUFDRCxNQUFNLE1BQU0sR0FBRyxJQUFJLE1BQU0sQ0FBQyxFQUFFLE1BQU0sRUFBRSxDQUFDLENBQUE7UUFDckMsTUFBTSxRQUFRLEdBQUcsTUFBTSxNQUFNLENBQUMsTUFBTSxDQUFDLElBQUksRUFBRSxDQUFBO1FBQzNDLE1BQU0sTUFBTSxHQUFHLFFBQVEsQ0FBQyxJQUFJLENBQUE7UUFDNUIsTUFBTSxDQUFDLElBQUksQ0FBQyxXQUFXLE1BQU0sQ0FBQyxNQUFNLHNCQUFzQixTQUFTL
EVBQUUsQ0FBQyxDQUFBO1FBQ3RFLFlBQVksQ0FBQyxNQUFNLEVBQUUsU0FBUyxDQUFDLENBQUE7UUFDL0IsT0FBTyxNQUFNLENBQUE7SUFDakIsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDYixNQUFNLENBQUMsS0FBSyxDQUFDLCtCQUErQixFQUFFLEtBQUssQ0FBQyxDQUFBO1FBQ3BELE1BQU0sS0FBSyxDQUFBO0lBQ2YsQ0FBQztBQUNMLENBQUM7QUFDRCxNQUFNLFVBQVUsbUJBQW1CLENBQUMsTUFBcUI7SUFDckQsTUFBTSxHQUFHLE1BQU0sQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLEVBQUUsQ0FBQyxFQUFFLEVBQUUsQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLGFBQWEsQ0FBQyxDQUFDLENBQUMsRUFBRSxDQUFDLENBQUMsQ0FBQTtJQUN4RCxPQUFPLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLEdBQUcsS0FBSyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7QUFDaEQsQ0FBQyJ9 ;// ./dist-in/models/cache/openai.js -const openai_models = [{ "id": "gpt-4o-audio-preview-2024-10-01", "object": "model", "created": 1727389042, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview", "object": "model", "created": 1727659998, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-10-01", "object": "model", "created": 1727131766, "owned_by": "system" }, { "id": "dall-e-2", "object": "model", "created": 1698798177, "owned_by": "system" }, { "id": "gpt-4o-2024-08-06", "object": "model", "created": 1722814719, "owned_by": "system" }, { "id": "gpt-4-turbo", "object": "model", "created": 1712361441, "owned_by": "system" }, { "id": "gpt-4-1106-preview", "object": "model", "created": 1698957206, "owned_by": "system" }, { "id": "gpt-4o", "object": "model", "created": 1715367049, "owned_by": "system" }, { "id": "gpt-3.5-turbo", "object": "model", "created": 1677610602, "owned_by": "openai" }, { "id": "gpt-3.5-turbo-0125", "object": "model", "created": 1706048358, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct", "object": "model", "created": 1692901427, "owned_by": "system" }, { "id": "gpt-4-1106-vision-preview", "object": "model", "created": 1711473033, "owned_by": "system" }, { "id": "babbage-002", "object": "model", "created": 1692634615, "owned_by": "system" }, { "id": "whisper-1", "object": "model", "created": 1677532384, "owned_by": "openai-internal" }, { "id": "dall-e-3", "object": "model", "created": 1698785189, "owned_by": "system" }, { "id": "text-embedding-3-small", "object": "model", "created": 1705948997, "owned_by": "system" }, { "id": "gpt-3.5-turbo-16k", "object": "model", "created": 1683758102, "owned_by": "openai-internal" }, { "id": "gpt-4-0125-preview", "object": "model", "created": 1706037612, "owned_by": "system" }, { "id": "gpt-4-turbo-preview", "object": "model", "created": 1706037777, "owned_by": "system" }, { "id": "chatgpt-4o-latest", "object": "model", "created": 1723515131, "owned_by": "system" }, { "id": "omni-moderation-latest", "object": "model", "created": 1731689265, "owned_by": "system" }, { "id": "gpt-4o-2024-05-13", "object": "model", "created": 1715368132, "owned_by": "system" }, { "id": "o1-preview-2024-09-12", "object": "model", "created": 1725648865, "owned_by": "system" }, { "id": "omni-moderation-2024-09-26", "object": "model", "created": 1732734466, "owned_by": "system" }, { "id": "tts-1-hd-1106", "object": "model", "created": 1699053533, "owned_by": "system" }, { "id": "o1-preview", "object": "model", "created": 1725648897, "owned_by": "system" }, { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }, { "id": "gpt-4-0613", "object": "model", "created": 1686588896, "owned_by": "openai" }, { "id": "tts-1-hd", "object": "model", "created": 1699046015, "owned_by": "system" }, { "id": "gpt-4-vision-preview", "object": "model", "created": 1698894917, "owned_by": "system" }, { 
"id": "text-embedding-ada-002", "object": "model", "created": 1671217299, "owned_by": "openai-internal" }, { "id": "gpt-3.5-turbo-1106", "object": "model", "created": 1698959748, "owned_by": "system" }, { "id": "gpt-4o-audio-preview", "object": "model", "created": 1727460443, "owned_by": "system" }, { "id": "tts-1", "object": "model", "created": 1681940951, "owned_by": "openai-internal" }, { "id": "tts-1-1106", "object": "model", "created": 1699053241, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct-0914", "object": "model", "created": 1694122472, "owned_by": "system" }, { "id": "davinci-002", "object": "model", "created": 1692634301, "owned_by": "system" }, { "id": "text-embedding-3-large", "object": "model", "created": 1705953180, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-12-17", "object": "model", "created": 1733945430, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview", "object": "model", "created": 1734387380, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview-2024-12-17", "object": "model", "created": 1734112601, "owned_by": "system" }, { "id": "o1-mini", "object": "model", "created": 1725649008, "owned_by": "system" }, { "id": "gpt-4o-2024-11-20", "object": "model", "created": 1731975040, "owned_by": "system" }, { "id": "o1-mini-2024-09-12", "object": "model", "created": 1725648979, "owned_by": "system" }, { "id": "gpt-4o-mini-2024-07-18", "object": "model", "created": 1721172717, "owned_by": "system" }, { "id": "gpt-4o-mini", "object": "model", "created": 1721172741, "owned_by": "system" }, { "id": "gpt-4o-audio-preview-2024-12-17", "object": "model", "created": 1734034239, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview", "object": "model", "created": 1734387424, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview-2024-12-17", "object": "model", "created": 1734115920, "owned_by": "system" }, { "id": "gpt-4-turbo-2024-04-09", "object": "model", "created": 1712601677, "owned_by": "system" }]; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL21vZGVscy9jYWNoZS9vcGVuYWkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFHLENBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyx5QkFBeUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9DQUFvQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxhQUFhLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFFBQVEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGVBQWUsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9CQUFvQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFdBQVcsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0JBQW9CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxxQkFBcUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG1CQUFtQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHVCQUF1QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsNEJBQTRCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxlQUFlLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxPQUFPLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQ
UksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxzQkFBc0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNCQUFzQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsT0FBTyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw2QkFBNkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0NBQW9DLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw4QkFBOEIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHlDQUF5QyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsU0FBUyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsYUFBYSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNDQUFzQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsQ0FBQyxDQUFBIn0= +const openai_models = [{ "id": "gpt-4o-audio-preview-2024-10-01", "object": "model", "created": 1727389042, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview", "object": "model", "created": 1727659998, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-10-01", "object": "model", "created": 1727131766, "owned_by": "system" }, { "id": "dall-e-2", "object": "model", "created": 1698798177, "owned_by": "system" }, { "id": "gpt-4o-2024-08-06", "object": "model", "created": 1722814719, "owned_by": "system" }, { "id": "gpt-4-turbo", "object": "model", "created": 1712361441, "owned_by": "system" }, { "id": "gpt-4-1106-preview", "object": "model", "created": 1698957206, "owned_by": "system" }, { "id": "gpt-4o", "object": 
"model", "created": 1715367049, "owned_by": "system" }, { "id": "gpt-3.5-turbo", "object": "model", "created": 1677610602, "owned_by": "openai" }, { "id": "gpt-3.5-turbo-0125", "object": "model", "created": 1706048358, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct", "object": "model", "created": 1692901427, "owned_by": "system" }, { "id": "gpt-4-1106-vision-preview", "object": "model", "created": 1711473033, "owned_by": "system" }, { "id": "babbage-002", "object": "model", "created": 1692634615, "owned_by": "system" }, { "id": "whisper-1", "object": "model", "created": 1677532384, "owned_by": "openai-internal" }, { "id": "dall-e-3", "object": "model", "created": 1698785189, "owned_by": "system" }, { "id": "text-embedding-3-small", "object": "model", "created": 1705948997, "owned_by": "system" }, { "id": "gpt-3.5-turbo-16k", "object": "model", "created": 1683758102, "owned_by": "openai-internal" }, { "id": "gpt-4-0125-preview", "object": "model", "created": 1706037612, "owned_by": "system" }, { "id": "gpt-4-turbo-preview", "object": "model", "created": 1706037777, "owned_by": "system" }, { "id": "chatgpt-4o-latest", "object": "model", "created": 1723515131, "owned_by": "system" }, { "id": "omni-moderation-latest", "object": "model", "created": 1731689265, "owned_by": "system" }, { "id": "gpt-4o-2024-05-13", "object": "model", "created": 1715368132, "owned_by": "system" }, { "id": "o1-preview-2024-09-12", "object": "model", "created": 1725648865, "owned_by": "system" }, { "id": "omni-moderation-2024-09-26", "object": "model", "created": 1732734466, "owned_by": "system" }, { "id": "tts-1-hd-1106", "object": "model", "created": 1699053533, "owned_by": "system" }, { "id": "o1-preview", "object": "model", "created": 1725648897, "owned_by": "system" }, { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }, { "id": "gpt-4-0613", "object": "model", "created": 1686588896, "owned_by": "openai" }, { "id": "tts-1-hd", "object": "model", "created": 1699046015, "owned_by": "system" }, { "id": "gpt-4-vision-preview", "object": "model", "created": 1698894917, "owned_by": "system" }, { "id": "text-embedding-ada-002", "object": "model", "created": 1671217299, "owned_by": "openai-internal" }, { "id": "gpt-3.5-turbo-1106", "object": "model", "created": 1698959748, "owned_by": "system" }, { "id": "gpt-4o-audio-preview", "object": "model", "created": 1727460443, "owned_by": "system" }, { "id": "tts-1", "object": "model", "created": 1681940951, "owned_by": "openai-internal" }, { "id": "tts-1-1106", "object": "model", "created": 1699053241, "owned_by": "system" }, { "id": "gpt-3.5-turbo-instruct-0914", "object": "model", "created": 1694122472, "owned_by": "system" }, { "id": "davinci-002", "object": "model", "created": 1692634301, "owned_by": "system" }, { "id": "text-embedding-3-large", "object": "model", "created": 1705953180, "owned_by": "system" }, { "id": "gpt-4o-realtime-preview-2024-12-17", "object": "model", "created": 1733945430, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview", "object": "model", "created": 1734387380, "owned_by": "system" }, { "id": "gpt-4o-mini-realtime-preview-2024-12-17", "object": "model", "created": 1734112601, "owned_by": "system" }, { "id": "o1-mini", "object": "model", "created": 1725649008, "owned_by": "system" }, { "id": "gpt-4o-2024-11-20", "object": "model", "created": 1731975040, "owned_by": "system" }, { "id": "o1-mini-2024-09-12", "object": "model", "created": 1725648979, "owned_by": "system" }, { "id": 
"gpt-4o-mini-2024-07-18", "object": "model", "created": 1721172717, "owned_by": "system" }, { "id": "gpt-4o-mini", "object": "model", "created": 1721172741, "owned_by": "system" }, { "id": "gpt-4o-audio-preview-2024-12-17", "object": "model", "created": 1734034239, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview", "object": "model", "created": 1734387424, "owned_by": "system" }, { "id": "gpt-4o-mini-audio-preview-2024-12-17", "object": "model", "created": 1734115920, "owned_by": "system" }, { "id": "gpt-4-turbo-2024-04-09", "object": "model", "created": 1712601677, "owned_by": "" }]; +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL21vZGVscy9jYWNoZS9vcGVuYWkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFHLENBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyx5QkFBeUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9DQUFvQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxhQUFhLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFFBQVEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGVBQWUsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG9CQUFvQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLFdBQVcsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLGlCQUFpQixFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0JBQW9CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxxQkFBcUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLG1CQUFtQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxtQkFBbUIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSU
FBSSxFQUFDLHVCQUF1QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsNEJBQTRCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxlQUFlLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxPQUFPLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxzQkFBc0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNCQUFzQixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsT0FBTyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsaUJBQWlCLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxZQUFZLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw2QkFBNkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLGFBQWEsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsb0NBQW9DLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyw4QkFBOEIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHlDQUF5QyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsU0FBUyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsbUJBQW1CLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQyxvQkFBb0IsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHdCQUF3QixFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsYUFBYSxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsaUNBQWlDLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxRQUFRLEVBQUMsRUFBQyxFQUFDLElBQUksRUFBQywyQkFBMkIsRUFBQyxRQUFRLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxVQUFVLEVBQUMsVUFBVSxFQUFDLFFBQVEsRUFBQyxFQUFDLEVBQUMsSUFBSSxFQUFDLHNDQUFzQyxFQUFDLFFBQVEsRUFBQyxPQUFPLEVBQUMsU0FBUyxFQUFDLFVBQVUsRUFBQyxVQUFVLEVBQUMsUUFBUSxFQUFDLEVBQUMsRUFBQyxJQUFJLEVBQUMsd0JBQXdCLEVBQUMsUUFBUSxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsVUFBVSxFQUFDLFVBQVUsRUFBQyxFQUFFLEVBQUMsQ0FBQyxDQUFBIn0= ;// ./dist-in/models/cache/openrouter.js const openrouter_models = [{ "id": "deepseek/deepseek-chat", "name": "DeepSeek V3", "pricing": { 
"prompt": "0.00000014", "completion": "0.00000028", "image": "0", "request": "0" }, "created": 1735241320 }, { "id": "qwen/qvq-72b-preview", "name": "Qwen: QvQ 72B Preview", "pricing": { "prompt": "0.00000025", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1735088567 }, { "id": "google/gemini-2.0-flash-thinking-exp:free", "name": "Google: Gemini 2.0 Flash Thinking Experimental (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1734650026 }, { "id": "sao10k/l3.3-euryale-70b", "name": "Sao10K: Llama 3.3 Euryale 70B", "pricing": { "prompt": "0.0000015", "completion": "0.0000015", "image": "0", "request": "0" }, "created": 1734535928 }, { "id": "inflatebot/mn-mag-mell-r1", "name": "Inflatebot: Mag Mell R1 12B", "pricing": { "prompt": "0.0000009", "completion": "0.0000009", "image": "0", "request": "0" }, "created": 1734535439 }, { "id": "openai/o1", "name": "OpenAI: o1", "pricing": { "prompt": "0.000015", "completion": "0.00006", "image": "0.021675", "request": "0" }, "created": 1734459999 }, { "id": "eva-unit-01/eva-llama-3.33-70b", "name": "EVA Llama 3.33 70b", "pricing": { "prompt": "0.000004", "completion": "0.000006", "image": "0", "request": "0" }, "created": 1734377303 }, { "id": "x-ai/grok-2-vision-1212", "name": "xAI: Grok 2 Vision 1212", "pricing": { "prompt": "0.000002", "completion": "0.00001", "image": "0.0036", "request": "0" }, "created": 1734237338 }, { "id": "x-ai/grok-2-1212", "name": "xAI: Grok 2 1212", "pricing": { "prompt": "0.000002", "completion": "0.00001", "image": "0", "request": "0" }, "created": 1734232814 }, { "id": "cohere/command-r7b-12-2024", "name": "Cohere: Command R7B (12-2024)", "pricing": { "prompt": "0.0000000375", "completion": "0.00000015", "image": "0", "request": "0" }, "created": 1734158152 }, { "id": "google/gemini-2.0-flash-exp:free", "name": "Google: Gemini Flash 2.0 Experimental (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1733937523 }, { "id": "google/gemini-exp-1206:free", "name": "Google: Gemini Experimental 1206 (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1733507713 }, { "id": "meta-llama/llama-3.3-70b-instruct", "name": "Meta: Llama 3.3 70B Instruct", "pricing": { "prompt": "0.00000012", "completion": "0.0000003", "image": "0", "request": "0" }, "created": 1733506137 }, { "id": "amazon/nova-lite-v1", "name": "Amazon: Nova Lite 1.0", "pricing": { "prompt": "0.00000006", "completion": "0.00000024", "image": "0.00009", "request": "0" }, "created": 1733437363 }, { "id": "amazon/nova-micro-v1", "name": "Amazon: Nova Micro 1.0", "pricing": { "prompt": "0.000000035", "completion": "0.00000014", "image": "0", "request": "0" }, "created": 1733437237 }, { "id": "amazon/nova-pro-v1", "name": "Amazon: Nova Pro 1.0", "pricing": { "prompt": "0.0000008", "completion": "0.0000032", "image": "0.0012", "request": "0" }, "created": 1733436303 }, { "id": "qwen/qwq-32b-preview", "name": "Qwen: QwQ 32B Preview", "pricing": { "prompt": "0.00000012", "completion": "0.00000018", "image": "0", "request": "0" }, "created": 1732754541 }, { "id": "google/gemini-exp-1121:free", "name": "Google: Gemini Experimental 1121 (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1732216725 }, { "id": "google/learnlm-1.5-pro-experimental:free", "name": "Google: LearnLM 1.5 Pro Experimental (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", 
"request": "0" }, "created": 1732216551 }, { "id": "eva-unit-01/eva-qwen-2.5-72b", "name": "EVA Qwen2.5 72B", "pricing": { "prompt": "0.000004", "completion": "0.000006", "image": "0", "request": "0" }, "created": 1732210606 }, { "id": "openai/gpt-4o-2024-11-20", "name": "OpenAI: GPT-4o (2024-11-20)", "pricing": { "prompt": "0.0000025", "completion": "0.00001", "image": "0.003613", "request": "0" }, "created": 1732127594 }, { "id": "mistralai/mistral-large-2411", "name": "Mistral Large 2411", "pricing": { "prompt": "0.000002", "completion": "0.000006", "image": "0", "request": "0" }, "created": 1731978685 }, { "id": "mistralai/mistral-large-2407", "name": "Mistral Large 2407", "pricing": { "prompt": "0.000002", "completion": "0.000006", "image": "0", "request": "0" }, "created": 1731978415 }, { "id": "mistralai/pixtral-large-2411", "name": "Mistral: Pixtral Large 2411", "pricing": { "prompt": "0.000002", "completion": "0.000006", "image": "0.002888", "request": "0" }, "created": 1731977388 }, { "id": "x-ai/grok-vision-beta", "name": "xAI: Grok Vision Beta", "pricing": { "prompt": "0.000005", "completion": "0.000015", "image": "0.009", "request": "0" }, "created": 1731976624 }, { "id": "google/gemini-exp-1114:free", "name": "Google: Gemini Experimental 1114 (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1731714740 }, { "id": "infermatic/mn-inferor-12b", "name": "Infermatic: Mistral Nemo Inferor 12B", "pricing": { "prompt": "0.00000025", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1731464428 }, { "id": "qwen/qwen-2.5-coder-32b-instruct", "name": "Qwen2.5 Coder 32B Instruct", "pricing": { "prompt": "0.00000007", "completion": "0.00000016", "image": "0", "request": "0" }, "created": 1731368400 }, { "id": "raifle/sorcererlm-8x22b", "name": "SorcererLM 8x22B", "pricing": { "prompt": "0.0000045", "completion": "0.0000045", "image": "0", "request": "0" }, "created": 1731105083 }, { "id": "eva-unit-01/eva-qwen-2.5-32b", "name": "EVA Qwen2.5 32B", "pricing": { "prompt": "0.0000026", "completion": "0.0000034", "image": "0", "request": "0" }, "created": 1731104847 }, { "id": "thedrummer/unslopnemo-12b", "name": "Unslopnemo 12b", "pricing": { "prompt": "0.0000005", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1731103448 }, { "id": "anthropic/claude-3.5-haiku-20241022:beta", "name": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)", "pricing": { "prompt": "0.0000008", "completion": "0.000004", "image": "0", "request": "0" }, "created": 1730678400 }, { "id": "anthropic/claude-3.5-haiku-20241022", "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)", "pricing": { "prompt": "0.0000008", "completion": "0.000004", "image": "0", "request": "0" }, "created": 1730678400 }, { "id": "anthropic/claude-3.5-haiku:beta", "name": "Anthropic: Claude 3.5 Haiku (self-moderated)", "pricing": { "prompt": "0.0000008", "completion": "0.000004", "image": "0", "request": "0" }, "created": 1730678400 }, { "id": "anthropic/claude-3.5-haiku", "name": "Anthropic: Claude 3.5 Haiku", "pricing": { "prompt": "0.0000008", "completion": "0.000004", "image": "0", "request": "0" }, "created": 1730678400 }, { "id": "neversleep/llama-3.1-lumimaid-70b", "name": "NeverSleep: Lumimaid v0.2 70B", "pricing": { "prompt": "0.000003375", "completion": "0.0000045", "image": "0", "request": "0" }, "created": 1729555200 }, { "id": "anthracite-org/magnum-v4-72b", "name": "Magnum v4 72B", "pricing": { "prompt": "0.000001875", "completion": 
"0.00000225", "image": "0", "request": "0" }, "created": 1729555200 }, { "id": "anthropic/claude-3.5-sonnet:beta", "name": "Anthropic: Claude 3.5 Sonnet (self-moderated)", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1729555200 }, { "id": "anthropic/claude-3.5-sonnet", "name": "Anthropic: Claude 3.5 Sonnet", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1729555200 }, { "id": "x-ai/grok-beta", "name": "xAI: Grok Beta", "pricing": { "prompt": "0.000005", "completion": "0.000015", "image": "0", "request": "0" }, "created": 1729382400 }, { "id": "mistralai/ministral-8b", "name": "Mistral: Ministral 8B", "pricing": { "prompt": "0.0000001", "completion": "0.0000001", "image": "0", "request": "0" }, "created": 1729123200 }, { "id": "mistralai/ministral-3b", "name": "Mistral: Ministral 3B", "pricing": { "prompt": "0.00000004", "completion": "0.00000004", "image": "0", "request": "0" }, "created": 1729123200 }, { "id": "qwen/qwen-2.5-7b-instruct", "name": "Qwen2.5 7B Instruct", "pricing": { "prompt": "0.00000027", "completion": "0.00000027", "image": "0", "request": "0" }, "created": 1729036800 }, { "id": "nvidia/llama-3.1-nemotron-70b-instruct", "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct", "pricing": { "prompt": "0.00000012", "completion": "0.0000003", "image": "0", "request": "0" }, "created": 1728950400 }, { "id": "inflection/inflection-3-pi", "name": "Inflection: Inflection 3 Pi", "pricing": { "prompt": "0.0000025", "completion": "0.00001", "image": "0", "request": "0" }, "created": 1728604800 }, { "id": "inflection/inflection-3-productivity", "name": "Inflection: Inflection 3 Productivity", "pricing": { "prompt": "0.0000025", "completion": "0.00001", "image": "0", "request": "0" }, "created": 1728604800 }, { "id": "google/gemini-flash-1.5-8b", "name": "Google: Gemini Flash 1.5 8B", "pricing": { "prompt": "0.0000000375", "completion": "0.00000015", "image": "0", "request": "0" }, "created": 1727913600 }, { "id": "anthracite-org/magnum-v2-72b", "name": "Magnum v2 72B", "pricing": { "prompt": "0.000003", "completion": "0.000003", "image": "0", "request": "0" }, "created": 1727654400 }, { "id": "liquid/lfm-40b", "name": "Liquid: LFM 40B MoE", "pricing": { "prompt": "0.00000015", "completion": "0.00000015", "image": "0", "request": "0" }, "created": 1727654400 }, { "id": "thedrummer/rocinante-12b", "name": "Rocinante 12B", "pricing": { "prompt": "0.00000025", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1727654400 }, { "id": "meta-llama/llama-3.2-3b-instruct:free", "name": "Meta: Llama 3.2 3B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-3b-instruct", "name": "Meta: Llama 3.2 3B Instruct", "pricing": { "prompt": "0.000000015", "completion": "0.000000025", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-1b-instruct:free", "name": "Meta: Llama 3.2 1B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-1b-instruct", "name": "Meta: Llama 3.2 1B Instruct", "pricing": { "prompt": "0.00000001", "completion": "0.00000001", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-90b-vision-instruct:free", "name": "Meta: Llama 3.2 90B Vision Instruct (free)", "pricing": { "prompt": 
"0", "completion": "0", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-90b-vision-instruct", "name": "Meta: Llama 3.2 90B Vision Instruct", "pricing": { "prompt": "0.0000009", "completion": "0.0000009", "image": "0.001301", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-11b-vision-instruct:free", "name": "Meta: Llama 3.2 11B Vision Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1727222400 }, { "id": "meta-llama/llama-3.2-11b-vision-instruct", "name": "Meta: Llama 3.2 11B Vision Instruct", "pricing": { "prompt": "0.000000055", "completion": "0.000000055", "image": "0.00007948", "request": "0" }, "created": 1727222400 }, { "id": "qwen/qwen-2.5-72b-instruct", "name": "Qwen2.5 72B Instruct", "pricing": { "prompt": "0.00000023", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1726704000 }, { "id": "qwen/qwen-2-vl-72b-instruct", "name": "Qwen2-VL 72B Instruct", "pricing": { "prompt": "0.0000004", "completion": "0.0000004", "image": "0.000578", "request": "0" }, "created": 1726617600 }, { "id": "neversleep/llama-3.1-lumimaid-8b", "name": "NeverSleep: Lumimaid v0.2 8B", "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1726358400 }, { "id": "openai/o1-mini-2024-09-12", "name": "OpenAI: o1-mini (2024-09-12)", "pricing": { "prompt": "0.000003", "completion": "0.000012", "image": "0", "request": "0" }, "created": 1726099200 }, { "id": "openai/o1-preview", "name": "OpenAI: o1-preview", "pricing": { "prompt": "0.000015", "completion": "0.00006", "image": "0", "request": "0" }, "created": 1726099200 }, { "id": "openai/o1-preview-2024-09-12", "name": "OpenAI: o1-preview (2024-09-12)", "pricing": { "prompt": "0.000015", "completion": "0.00006", "image": "0", "request": "0" }, "created": 1726099200 }, { "id": "openai/o1-mini", "name": "OpenAI: o1-mini", "pricing": { "prompt": "0.000003", "completion": "0.000012", "image": "0", "request": "0" }, "created": 1726099200 }, { "id": "mistralai/pixtral-12b", "name": "Mistral: Pixtral 12B", "pricing": { "prompt": "0.0000001", "completion": "0.0000001", "image": "0.0001445", "request": "0" }, "created": 1725926400 }, { "id": "cohere/command-r-08-2024", "name": "Cohere: Command R (08-2024)", "pricing": { "prompt": "0.0000001425", "completion": "0.00000057", "image": "0", "request": "0" }, "created": 1724976000 }, { "id": "cohere/command-r-plus-08-2024", "name": "Cohere: Command R+ (08-2024)", "pricing": { "prompt": "0.000002375", "completion": "0.0000095", "image": "0", "request": "0" }, "created": 1724976000 }, { "id": "qwen/qwen-2-vl-7b-instruct", "name": "Qwen2-VL 7B Instruct", "pricing": { "prompt": "0.0000001", "completion": "0.0000001", "image": "0.0001445", "request": "0" }, "created": 1724803200 }, { "id": "google/gemini-flash-1.5-exp", "name": "Google: Gemini Flash 1.5 Experimental", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1724803200 }, { "id": "sao10k/l3.1-euryale-70b", "name": "Sao10K: Llama 3.1 Euryale 70B v2.2", "pricing": { "prompt": "0.00000035", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1724803200 }, { "id": "google/gemini-flash-1.5-8b-exp", "name": "Google: Gemini Flash 1.5 8B Experimental", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1724803200 }, { "id": "ai21/jamba-1-5-large", "name": "AI21: Jamba 1.5 Large", 
"pricing": { "prompt": "0.000002", "completion": "0.000008", "image": "0", "request": "0" }, "created": 1724371200 }, { "id": "ai21/jamba-1-5-mini", "name": "AI21: Jamba 1.5 Mini", "pricing": { "prompt": "0.0000002", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1724371200 }, { "id": "microsoft/phi-3.5-mini-128k-instruct", "name": "Microsoft: Phi-3.5 Mini 128K Instruct", "pricing": { "prompt": "0.0000001", "completion": "0.0000001", "image": "0", "request": "0" }, "created": 1724198400 }, { "id": "nousresearch/hermes-3-llama-3.1-70b", "name": "Nous: Hermes 3 70B Instruct", "pricing": { "prompt": "0.00000012", "completion": "0.0000003", "image": "0", "request": "0" }, "created": 1723939200 }, { "id": "nousresearch/hermes-3-llama-3.1-405b", "name": "Nous: Hermes 3 405B Instruct", "pricing": { "prompt": "0.0000008", "completion": "0.0000008", "image": "0", "request": "0" }, "created": 1723766400 }, { "id": "perplexity/llama-3.1-sonar-huge-128k-online", "name": "Perplexity: Llama 3.1 Sonar 405B Online", "pricing": { "prompt": "0.000005", "completion": "0.000005", "image": "0", "request": "0.005" }, "created": 1723593600 }, { "id": "openai/chatgpt-4o-latest", "name": "OpenAI: ChatGPT-4o", "pricing": { "prompt": "0.000005", "completion": "0.000015", "image": "0.007225", "request": "0" }, "created": 1723593600 }, { "id": "sao10k/l3-lunaris-8b", "name": "Sao10K: Llama 3 8B Lunaris", "pricing": { "prompt": "0.00000003", "completion": "0.00000006", "image": "0", "request": "0" }, "created": 1723507200 }, { "id": "aetherwiing/mn-starcannon-12b", "name": "Aetherwiing: Starcannon 12B", "pricing": { "prompt": "0.0000008", "completion": "0.0000012", "image": "0", "request": "0" }, "created": 1723507200 }, { "id": "openai/gpt-4o-2024-08-06", "name": "OpenAI: GPT-4o (2024-08-06)", "pricing": { "prompt": "0.0000025", "completion": "0.00001", "image": "0.003613", "request": "0" }, "created": 1722902400 }, { "id": "meta-llama/llama-3.1-405b", "name": "Meta: Llama 3.1 405B (base)", "pricing": { "prompt": "0.000002", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1722556800 }, { "id": "nothingiisreal/mn-celeste-12b", "name": "Mistral Nemo 12B Celeste", "pricing": { "prompt": "0.0000008", "completion": "0.0000012", "image": "0", "request": "0" }, "created": 1722556800 }, { "id": "perplexity/llama-3.1-sonar-small-128k-chat", "name": "Perplexity: Llama 3.1 Sonar 8B", "pricing": { "prompt": "0.0000002", "completion": "0.0000002", "image": "0", "request": "0" }, "created": 1722470400 }, { "id": "google/gemini-pro-1.5-exp", "name": "Google: Gemini Pro 1.5 Experimental", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1722470400 }, { "id": "perplexity/llama-3.1-sonar-large-128k-chat", "name": "Perplexity: Llama 3.1 Sonar 70B", "pricing": { "prompt": "0.000001", "completion": "0.000001", "image": "0", "request": "0" }, "created": 1722470400 }, { "id": "perplexity/llama-3.1-sonar-large-128k-online", "name": "Perplexity: Llama 3.1 Sonar 70B Online", "pricing": { "prompt": "0.000001", "completion": "0.000001", "image": "0", "request": "0.005" }, "created": 1722470400 }, { "id": "perplexity/llama-3.1-sonar-small-128k-online", "name": "Perplexity: Llama 3.1 Sonar 8B Online", "pricing": { "prompt": "0.0000002", "completion": "0.0000002", "image": "0", "request": "0.005" }, "created": 1722470400 }, { "id": "meta-llama/llama-3.1-405b-instruct:free", "name": "Meta: Llama 3.1 405B Instruct (free)", "pricing": { "prompt": "0", "completion": 
"0", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-405b-instruct", "name": "Meta: Llama 3.1 405B Instruct", "pricing": { "prompt": "0.0000008", "completion": "0.0000008", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-405b-instruct:nitro", "name": "Meta: Llama 3.1 405B Instruct (nitro)", "pricing": { "prompt": "0.00001462", "completion": "0.00001462", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-8b-instruct:free", "name": "Meta: Llama 3.1 8B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-8b-instruct", "name": "Meta: Llama 3.1 8B Instruct", "pricing": { "prompt": "0.00000002", "completion": "0.00000005", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-70b-instruct:free", "name": "Meta: Llama 3.1 70B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-70b-instruct", "name": "Meta: Llama 3.1 70B Instruct", "pricing": { "prompt": "0.00000012", "completion": "0.0000003", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "meta-llama/llama-3.1-70b-instruct:nitro", "name": "Meta: Llama 3.1 70B Instruct (nitro)", "pricing": { "prompt": "0.00000325", "completion": "0.00000325", "image": "0", "request": "0" }, "created": 1721692800 }, { "id": "mistralai/mistral-nemo", "name": "Mistral: Mistral Nemo", "pricing": { "prompt": "0.000000035", "completion": "0.00000008", "image": "0", "request": "0" }, "created": 1721347200 }, { "id": "mistralai/codestral-mamba", "name": "Mistral: Codestral Mamba", "pricing": { "prompt": "0.00000025", "completion": "0.00000025", "image": "0", "request": "0" }, "created": 1721347200 }, { "id": "openai/gpt-4o-mini", "name": "OpenAI: GPT-4o-mini", "pricing": { "prompt": "0.00000015", "completion": "0.0000006", "image": "0.007225", "request": "0" }, "created": 1721260800 }, { "id": "openai/gpt-4o-mini-2024-07-18", "name": "OpenAI: GPT-4o-mini (2024-07-18)", "pricing": { "prompt": "0.00000015", "completion": "0.0000006", "image": "0.007225", "request": "0" }, "created": 1721260800 }, { "id": "qwen/qwen-2-7b-instruct:free", "name": "Qwen 2 7B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1721088000 }, { "id": "qwen/qwen-2-7b-instruct", "name": "Qwen 2 7B Instruct", "pricing": { "prompt": "0.000000054", "completion": "0.000000054", "image": "0", "request": "0" }, "created": 1721088000 }, { "id": "google/gemma-2-27b-it", "name": "Google: Gemma 2 27B", "pricing": { "prompt": "0.00000027", "completion": "0.00000027", "image": "0", "request": "0" }, "created": 1720828800 }, { "id": "alpindale/magnum-72b", "name": "Magnum 72B", "pricing": { "prompt": "0.000001875", "completion": "0.00000225", "image": "0", "request": "0" }, "created": 1720656000 }, { "id": "google/gemma-2-9b-it:free", "name": "Google: Gemma 2 9B (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1719532800 }, { "id": "google/gemma-2-9b-it", "name": "Google: Gemma 2 9B", "pricing": { "prompt": "0.00000003", "completion": "0.00000006", "image": "0", "request": "0" }, "created": 1719532800 }, { "id": "01-ai/yi-large", "name": "01.AI: Yi Large", "pricing": { "prompt": "0.000003", "completion": "0.000003", "image": "0", "request": "0" }, 
"created": 1719273600 }, { "id": "ai21/jamba-instruct", "name": "AI21: Jamba Instruct", "pricing": { "prompt": "0.0000005", "completion": "0.0000007", "image": "0", "request": "0" }, "created": 1719273600 }, { "id": "anthropic/claude-3.5-sonnet-20240620:beta", "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20) (self-moderated)", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1718841600 }, { "id": "anthropic/claude-3.5-sonnet-20240620", "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1718841600 }, { "id": "sao10k/l3-euryale-70b", "name": "Sao10k: Llama 3 Euryale 70B v2.1", "pricing": { "prompt": "0.00000035", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1718668800 }, { "id": "cognitivecomputations/dolphin-mixtral-8x22b", "name": "Dolphin 2.9.2 Mixtral 8x22B 🐬", "pricing": { "prompt": "0.0000009", "completion": "0.0000009", "image": "0", "request": "0" }, "created": 1717804800 }, { "id": "qwen/qwen-2-72b-instruct", "name": "Qwen 2 72B Instruct", "pricing": { "prompt": "0.00000034", "completion": "0.00000039", "image": "0", "request": "0" }, "created": 1717718400 }, { "id": "mistralai/mistral-7b-instruct:free", "name": "Mistral: Mistral 7B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1716768000 }, { "id": "mistralai/mistral-7b-instruct", "name": "Mistral: Mistral 7B Instruct", "pricing": { "prompt": "0.00000003", "completion": "0.000000055", "image": "0", "request": "0" }, "created": 1716768000 }, { "id": "mistralai/mistral-7b-instruct:nitro", "name": "Mistral: Mistral 7B Instruct (nitro)", "pricing": { "prompt": "0.00000007", "completion": "0.00000007", "image": "0", "request": "0" }, "created": 1716768000 }, { "id": "mistralai/mistral-7b-instruct-v0.3", "name": "Mistral: Mistral 7B Instruct v0.3", "pricing": { "prompt": "0.00000003", "completion": "0.000000055", "image": "0", "request": "0" }, "created": 1716768000 }, { "id": "nousresearch/hermes-2-pro-llama-3-8b", "name": "NousResearch: Hermes 2 Pro - Llama-3 8B", "pricing": { "prompt": "0.000000025", "completion": "0.00000004", "image": "0", "request": "0" }, "created": 1716768000 }, { "id": "microsoft/phi-3-mini-128k-instruct:free", "name": "Microsoft: Phi-3 Mini 128K Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1716681600 }, { "id": "microsoft/phi-3-mini-128k-instruct", "name": "Microsoft: Phi-3 Mini 128K Instruct", "pricing": { "prompt": "0.0000001", "completion": "0.0000001", "image": "0", "request": "0" }, "created": 1716681600 }, { "id": "microsoft/phi-3-medium-128k-instruct:free", "name": "Microsoft: Phi-3 Medium 128K Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1716508800 }, { "id": "microsoft/phi-3-medium-128k-instruct", "name": "Microsoft: Phi-3 Medium 128K Instruct", "pricing": { "prompt": "0.000001", "completion": "0.000001", "image": "0", "request": "0" }, "created": 1716508800 }, { "id": "neversleep/llama-3-lumimaid-70b", "name": "NeverSleep: Llama 3 Lumimaid 70B", "pricing": { "prompt": "0.000003375", "completion": "0.0000045", "image": "0", "request": "0" }, "created": 1715817600 }, { "id": "google/gemini-flash-1.5", "name": "Google: Gemini Flash 1.5", "pricing": { "prompt": "0.000000075", "completion": "0.0000003", "image": "0.00004", 
"request": "0" }, "created": 1715644800 }, { "id": "perplexity/llama-3-sonar-large-32k-chat", "name": "Perplexity: Llama3 Sonar 70B", "pricing": { "prompt": "0.000001", "completion": "0.000001", "image": "0", "request": "0" }, "created": 1715644800 }, { "id": "perplexity/llama-3-sonar-large-32k-online", "name": "Perplexity: Llama3 Sonar 70B Online", "pricing": { "prompt": "0.000001", "completion": "0.000001", "image": "0", "request": "0.005" }, "created": 1715644800 }, { "id": "deepseek/deepseek-chat-v2.5", "name": "DeepSeek V2.5", "pricing": { "prompt": "0.000002", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1715644800 }, { "id": "perplexity/llama-3-sonar-small-32k-chat", "name": "Perplexity: Llama3 Sonar 8B", "pricing": { "prompt": "0.0000002", "completion": "0.0000002", "image": "0", "request": "0" }, "created": 1715644800 }, { "id": "openai/gpt-4o-2024-05-13", "name": "OpenAI: GPT-4o (2024-05-13)", "pricing": { "prompt": "0.000005", "completion": "0.000015", "image": "0.007225", "request": "0" }, "created": 1715558400 }, { "id": "meta-llama/llama-guard-2-8b", "name": "Meta: LlamaGuard 2 8B", "pricing": { "prompt": "0.00000018", "completion": "0.00000018", "image": "0", "request": "0" }, "created": 1715558400 }, { "id": "openai/gpt-4o", "name": "OpenAI: GPT-4o", "pricing": { "prompt": "0.0000025", "completion": "0.00001", "image": "0.003613", "request": "0" }, "created": 1715558400 }, { "id": "openai/gpt-4o:extended", "name": "OpenAI: GPT-4o (extended)", "pricing": { "prompt": "0.000006", "completion": "0.000018", "image": "0.007225", "request": "0" }, "created": 1715558400 }, { "id": "neversleep/llama-3-lumimaid-8b:extended", "name": "NeverSleep: Llama 3 Lumimaid 8B (extended)", "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1714780800 }, { "id": "neversleep/llama-3-lumimaid-8b", "name": "NeverSleep: Llama 3 Lumimaid 8B", "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1714780800 }, { "id": "meta-llama/llama-3-8b-instruct:free", "name": "Meta: Llama 3 8B Instruct (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "meta-llama/llama-3-8b-instruct", "name": "Meta: Llama 3 8B Instruct", "pricing": { "prompt": "0.00000003", "completion": "0.00000006", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "meta-llama/llama-3-8b-instruct:extended", "name": "Meta: Llama 3 8B Instruct (extended)", "pricing": { "prompt": "0.0000001875", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "meta-llama/llama-3-8b-instruct:nitro", "name": "Meta: Llama 3 8B Instruct (nitro)", "pricing": { "prompt": "0.0000002", "completion": "0.0000002", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "meta-llama/llama-3-70b-instruct", "name": "Meta: Llama 3 70B Instruct", "pricing": { "prompt": "0.00000023", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "meta-llama/llama-3-70b-instruct:nitro", "name": "Meta: Llama 3 70B Instruct (nitro)", "pricing": { "prompt": "0.000000792", "completion": "0.000000792", "image": "0", "request": "0" }, "created": 1713398400 }, { "id": "mistralai/mixtral-8x22b-instruct", "name": "Mistral: Mixtral 8x22B Instruct", "pricing": { "prompt": "0.0000009", "completion": "0.0000009", "image": "0", "request": "0" }, "created": 1713312000 }, { "id": 
"microsoft/wizardlm-2-8x22b", "name": "WizardLM-2 8x22B", "pricing": { "prompt": "0.0000005", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1713225600 }, { "id": "microsoft/wizardlm-2-7b", "name": "WizardLM-2 7B", "pricing": { "prompt": "0.000000055", "completion": "0.000000055", "image": "0", "request": "0" }, "created": 1713225600 }, { "id": "google/gemini-pro-1.5", "name": "Google: Gemini Pro 1.5", "pricing": { "prompt": "0.00000125", "completion": "0.000005", "image": "0.0006575", "request": "0" }, "created": 1712620800 }, { "id": "openai/gpt-4-turbo", "name": "OpenAI: GPT-4 Turbo", "pricing": { "prompt": "0.00001", "completion": "0.00003", "image": "0.01445", "request": "0" }, "created": 1712620800 }, { "id": "cohere/command-r-plus", "name": "Cohere: Command R+", "pricing": { "prompt": "0.00000285", "completion": "0.00001425", "image": "0", "request": "0" }, "created": 1712188800 }, { "id": "cohere/command-r-plus-04-2024", "name": "Cohere: Command R+ (04-2024)", "pricing": { "prompt": "0.00000285", "completion": "0.00001425", "image": "0", "request": "0" }, "created": 1712016000 }, { "id": "databricks/dbrx-instruct", "name": "Databricks: DBRX 132B Instruct", "pricing": { "prompt": "0.00000108", "completion": "0.00000108", "image": "0", "request": "0" }, "created": 1711670400 }, { "id": "sophosympatheia/midnight-rose-70b", "name": "Midnight Rose 70B", "pricing": { "prompt": "0.0000008", "completion": "0.0000008", "image": "0", "request": "0" }, "created": 1711065600 }, { "id": "cohere/command", "name": "Cohere: Command", "pricing": { "prompt": "0.00000095", "completion": "0.0000019", "image": "0", "request": "0" }, "created": 1710374400 }, { "id": "cohere/command-r", "name": "Cohere: Command R", "pricing": { "prompt": "0.000000475", "completion": "0.000001425", "image": "0", "request": "0" }, "created": 1710374400 }, { "id": "anthropic/claude-3-haiku:beta", "name": "Anthropic: Claude 3 Haiku (self-moderated)", "pricing": { "prompt": "0.00000025", "completion": "0.00000125", "image": "0.0004", "request": "0" }, "created": 1710288000 }, { "id": "anthropic/claude-3-haiku", "name": "Anthropic: Claude 3 Haiku", "pricing": { "prompt": "0.00000025", "completion": "0.00000125", "image": "0.0004", "request": "0" }, "created": 1710288000 }, { "id": "anthropic/claude-3-opus:beta", "name": "Anthropic: Claude 3 Opus (self-moderated)", "pricing": { "prompt": "0.000015", "completion": "0.000075", "image": "0.024", "request": "0" }, "created": 1709596800 }, { "id": "anthropic/claude-3-opus", "name": "Anthropic: Claude 3 Opus", "pricing": { "prompt": "0.000015", "completion": "0.000075", "image": "0.024", "request": "0" }, "created": 1709596800 }, { "id": "anthropic/claude-3-sonnet:beta", "name": "Anthropic: Claude 3 Sonnet (self-moderated)", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1709596800 }, { "id": "anthropic/claude-3-sonnet", "name": "Anthropic: Claude 3 Sonnet", "pricing": { "prompt": "0.000003", "completion": "0.000015", "image": "0.0048", "request": "0" }, "created": 1709596800 }, { "id": "cohere/command-r-03-2024", "name": "Cohere: Command R (03-2024)", "pricing": { "prompt": "0.000000475", "completion": "0.000001425", "image": "0", "request": "0" }, "created": 1709341200 }, { "id": "mistralai/mistral-large", "name": "Mistral Large", "pricing": { "prompt": "0.000002", "completion": "0.000006", "image": "0", "request": "0" }, "created": 1708905600 }, { "id": "openai/gpt-3.5-turbo-0613", "name": 
"OpenAI: GPT-3.5 Turbo (older v0613)", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1706140800 }, { "id": "openai/gpt-4-turbo-preview", "name": "OpenAI: GPT-4 Turbo Preview", "pricing": { "prompt": "0.00001", "completion": "0.00003", "image": "0", "request": "0" }, "created": 1706140800 }, { "id": "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", "name": "Nous: Hermes 2 Mixtral 8x7B DPO", "pricing": { "prompt": "0.00000054", "completion": "0.00000054", "image": "0", "request": "0" }, "created": 1705363200 }, { "id": "mistralai/mistral-small", "name": "Mistral Small", "pricing": { "prompt": "0.0000002", "completion": "0.0000006", "image": "0", "request": "0" }, "created": 1704844800 }, { "id": "mistralai/mistral-tiny", "name": "Mistral Tiny", "pricing": { "prompt": "0.00000025", "completion": "0.00000025", "image": "0", "request": "0" }, "created": 1704844800 }, { "id": "mistralai/mistral-medium", "name": "Mistral Medium", "pricing": { "prompt": "0.00000275", "completion": "0.0000081", "image": "0", "request": "0" }, "created": 1704844800 }, { "id": "mistralai/mistral-7b-instruct-v0.2", "name": "Mistral: Mistral 7B Instruct v0.2", "pricing": { "prompt": "0.00000018", "completion": "0.00000018", "image": "0", "request": "0" }, "created": 1703721600 }, { "id": "cognitivecomputations/dolphin-mixtral-8x7b", "name": "Dolphin 2.6 Mixtral 8x7B 🐬", "pricing": { "prompt": "0.0000005", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1703116800 }, { "id": "google/gemini-pro-vision", "name": "Google: Gemini Pro Vision 1.0", "pricing": { "prompt": "0.0000005", "completion": "0.0000015", "image": "0.0025", "request": "0" }, "created": 1702425600 }, { "id": "google/gemini-pro", "name": "Google: Gemini Pro 1.0", "pricing": { "prompt": "0.0000005", "completion": "0.0000015", "image": "0.0025", "request": "0" }, "created": 1702425600 }, { "id": "mistralai/mixtral-8x7b", "name": "Mistral: Mixtral 8x7B (base)", "pricing": { "prompt": "0.00000054", "completion": "0.00000054", "image": "0", "request": "0" }, "created": 1702166400 }, { "id": "mistralai/mixtral-8x7b-instruct", "name": "Mistral: Mixtral 8x7B Instruct", "pricing": { "prompt": "0.00000024", "completion": "0.00000024", "image": "0", "request": "0" }, "created": 1702166400 }, { "id": "mistralai/mixtral-8x7b-instruct:nitro", "name": "Mistral: Mixtral 8x7B Instruct (nitro)", "pricing": { "prompt": "0.00000054", "completion": "0.00000054", "image": "0", "request": "0" }, "created": 1702166400 }, { "id": "openchat/openchat-7b:free", "name": "OpenChat 3.5 7B (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1701129600 }, { "id": "openchat/openchat-7b", "name": "OpenChat 3.5 7B", "pricing": { "prompt": "0.000000055", "completion": "0.000000055", "image": "0", "request": "0" }, "created": 1701129600 }, { "id": "neversleep/noromaid-20b", "name": "Noromaid 20B", "pricing": { "prompt": "0.0000015", "completion": "0.00000225", "image": "0", "request": "0" }, "created": 1700956800 }, { "id": "anthropic/claude-2:beta", "name": "Anthropic: Claude v2 (self-moderated)", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1700611200 }, { "id": "anthropic/claude-2", "name": "Anthropic: Claude v2", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1700611200 }, { "id": "anthropic/claude-2.1:beta", "name": "Anthropic: Claude v2.1 
(self-moderated)", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1700611200 }, { "id": "anthropic/claude-2.1", "name": "Anthropic: Claude v2.1", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1700611200 }, { "id": "teknium/openhermes-2.5-mistral-7b", "name": "OpenHermes 2.5 Mistral 7B", "pricing": { "prompt": "0.00000017", "completion": "0.00000017", "image": "0", "request": "0" }, "created": 1700438400 }, { "id": "lizpreciatior/lzlv-70b-fp16-hf", "name": "lzlv 70B", "pricing": { "prompt": "0.00000035", "completion": "0.0000004", "image": "0", "request": "0" }, "created": 1699747200 }, { "id": "undi95/toppy-m-7b:free", "name": "Toppy M 7B (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1699574400 }, { "id": "undi95/toppy-m-7b:nitro", "name": "Toppy M 7B (nitro)", "pricing": { "prompt": "0.00000007", "completion": "0.00000007", "image": "0", "request": "0" }, "created": 1699574400 }, { "id": "undi95/toppy-m-7b", "name": "Toppy M 7B", "pricing": { "prompt": "0.00000007", "completion": "0.00000007", "image": "0", "request": "0" }, "created": 1699574400 }, { "id": "alpindale/goliath-120b", "name": "Goliath 120B", "pricing": { "prompt": "0.000009375", "completion": "0.000009375", "image": "0", "request": "0" }, "created": 1699574400 }, { "id": "openrouter/auto", "name": "Auto Router (best for prompt)", "pricing": { "prompt": "-1", "completion": "-1", "request": "-1", "image": "-1" }, "created": 1699401600 }, { "id": "openai/gpt-3.5-turbo-1106", "name": "OpenAI: GPT-3.5 Turbo 16k (older v1106)", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1699228800 }, { "id": "openai/gpt-4-1106-preview", "name": "OpenAI: GPT-4 Turbo (older v1106)", "pricing": { "prompt": "0.00001", "completion": "0.00003", "image": "0", "request": "0" }, "created": 1699228800 }, { "id": "google/palm-2-chat-bison-32k", "name": "Google: PaLM 2 Chat 32k", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1698969600 }, { "id": "google/palm-2-codechat-bison-32k", "name": "Google: PaLM 2 Code Chat 32k", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1698969600 }, { "id": "jondurbin/airoboros-l2-70b", "name": "Airoboros 70B", "pricing": { "prompt": "0.0000005", "completion": "0.0000005", "image": "0", "request": "0" }, "created": 1698537600 }, { "id": "xwin-lm/xwin-lm-70b", "name": "Xwin 70B", "pricing": { "prompt": "0.00000375", "completion": "0.00000375", "image": "0", "request": "0" }, "created": 1697328000 }, { "id": "openai/gpt-3.5-turbo-instruct", "name": "OpenAI: GPT-3.5 Turbo Instruct", "pricing": { "prompt": "0.0000015", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1695859200 }, { "id": "mistralai/mistral-7b-instruct-v0.1", "name": "Mistral: Mistral 7B Instruct v0.1", "pricing": { "prompt": "0.00000018", "completion": "0.00000018", "image": "0", "request": "0" }, "created": 1695859200 }, { "id": "pygmalionai/mythalion-13b", "name": "Pygmalion: Mythalion 13B", "pricing": { "prompt": "0.0000008", "completion": "0.0000012", "image": "0", "request": "0" }, "created": 1693612800 }, { "id": "openai/gpt-3.5-turbo-16k", "name": "OpenAI: GPT-3.5 Turbo 16k", "pricing": { "prompt": "0.000003", "completion": "0.000004", "image": "0", "request": "0" }, "created": 1693180800 }, { "id": 
"openai/gpt-4-32k", "name": "OpenAI: GPT-4 32k", "pricing": { "prompt": "0.00006", "completion": "0.00012", "image": "0", "request": "0" }, "created": 1693180800 }, { "id": "openai/gpt-4-32k-0314", "name": "OpenAI: GPT-4 32k (older v0314)", "pricing": { "prompt": "0.00006", "completion": "0.00012", "image": "0", "request": "0" }, "created": 1693180800 }, { "id": "nousresearch/nous-hermes-llama2-13b", "name": "Nous: Hermes 13B", "pricing": { "prompt": "0.00000017", "completion": "0.00000017", "image": "0", "request": "0" }, "created": 1692489600 }, { "id": "mancer/weaver", "name": "Mancer: Weaver (alpha)", "pricing": { "prompt": "0.0000015", "completion": "0.00000225", "image": "0", "request": "0" }, "created": 1690934400 }, { "id": "huggingfaceh4/zephyr-7b-beta:free", "name": "Hugging Face: Zephyr 7B (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1690934400 }, { "id": "anthropic/claude-2.0:beta", "name": "Anthropic: Claude v2.0 (self-moderated)", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1690502400 }, { "id": "anthropic/claude-2.0", "name": "Anthropic: Claude v2.0", "pricing": { "prompt": "0.000008", "completion": "0.000024", "image": "0", "request": "0" }, "created": 1690502400 }, { "id": "undi95/remm-slerp-l2-13b", "name": "ReMM SLERP 13B", "pricing": { "prompt": "0.0000008", "completion": "0.0000012", "image": "0", "request": "0" }, "created": 1689984000 }, { "id": "undi95/remm-slerp-l2-13b:extended", "name": "ReMM SLERP 13B (extended)", "pricing": { "prompt": "0.000001125", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1689984000 }, { "id": "google/palm-2-chat-bison", "name": "Google: PaLM 2 Chat", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1689811200 }, { "id": "google/palm-2-codechat-bison", "name": "Google: PaLM 2 Code Chat", "pricing": { "prompt": "0.000001", "completion": "0.000002", "image": "0", "request": "0" }, "created": 1689811200 }, { "id": "gryphe/mythomax-l2-13b:free", "name": "MythoMax 13B (free)", "pricing": { "prompt": "0", "completion": "0", "image": "0", "request": "0" }, "created": 1688256000 }, { "id": "gryphe/mythomax-l2-13b", "name": "MythoMax 13B", "pricing": { "prompt": "0.000000065", "completion": "0.000000065", "image": "0", "request": "0" }, "created": 1688256000 }, { "id": "gryphe/mythomax-l2-13b:nitro", "name": "MythoMax 13B (nitro)", "pricing": { "prompt": "0.0000002", "completion": "0.0000002", "image": "0", "request": "0" }, "created": 1688256000 }, { "id": "gryphe/mythomax-l2-13b:extended", "name": "MythoMax 13B (extended)", "pricing": { "prompt": "0.000001125", "completion": "0.000001125", "image": "0", "request": "0" }, "created": 1688256000 }, { "id": "meta-llama/llama-2-13b-chat", "name": "Meta: Llama 2 13B Chat", "pricing": { "prompt": "0.000000198", "completion": "0.000000198", "image": "0", "request": "0" }, "created": 1687219200 }, { "id": "openai/gpt-3.5-turbo", "name": "OpenAI: GPT-3.5 Turbo", "pricing": { "prompt": "0.0000005", "completion": "0.0000015", "image": "0", "request": "0" }, "created": 1685232000 }, { "id": "openai/gpt-3.5-turbo-0125", "name": "OpenAI: GPT-3.5 Turbo 16k", "pricing": { "prompt": "0.0000005", "completion": "0.0000015", "image": "0", "request": "0" }, "created": 1685232000 }, { "id": "openai/gpt-4", "name": "OpenAI: GPT-4", "pricing": { "prompt": "0.00003", "completion": "0.00006", "image": "0", "request": "0" }, 
"created": 1685232000 }, { "id": "openai/gpt-4-0314", "name": "OpenAI: GPT-4 (older v0314)", "pricing": { "prompt": "0.00003", "completion": "0.00006", "image": "0", "request": "0" }, "created": 1685232000 }]; //# sourceMappingURL=data:application/json;base64,{"version":3,"file":"openrouter.js","sourceRoot":"","sources":["../../../src/models/cache/openrouter.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,aAAa,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2CAA2C,EAAC,MAAM,EAAC,uDAAuD,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,WAAW,EAAC,MAAM,EAAC,YAAY,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,yBAAyB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kBAAkB,EAAC,MAAM,EAAC,kBAAkB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,8CAA8C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qBAAqB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,SAAS,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oBAAoB,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,
IAAI,EAAC,0CAA0C,EAAC,MAAM,EAAC,6CAA6C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,OAAO,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,sCAAsC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,4BAA4B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,kBAAkB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,gBAAgB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0CAA0C,EAAC,MAAM,EAAC,2DAA2D,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qCAAqC,EAAC,MAAM,EAAC,0CAA0C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iCAAiC,EAAC,MAAM,EAAC,8CAA8C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,+CAA+C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAA
C,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gBAAgB,EAAC,MAAM,EAAC,gBAAgB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wCAAwC,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gBAAgB,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uCAAuC,EAAC,MAAM,EAAC,oCAAoC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uCAAuC,EAAC,MAAM,EAAC,oCAAoC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+CAA+C,EAAC,MAAM,EAAC,4CAA4C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0CAA0C,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+CAA+C,EAAC,MAAM,EAAC,4CAA4C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0CAA0C,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,YAAY,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UA
AU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mBAAmB,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gBAAgB,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,WAAW,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,WAAW,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,oCAAoC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,0CAA0C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qBAAqB,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qCAAqC,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6CAA6C,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,OAAO,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,4BAA4B,EAAC,SAAS,EAAC,EAA
C,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4CAA4C,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4CAA4C,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8CAA8C,EAAC,MAAM,EAAC,wCAAwC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,OAAO,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8CAA8C,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,OAAO,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,sCAAsC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0CAA0C,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uCAAuC,EAAC,MAAM,EAAC,oCAAoC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wCAAwC,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,sCAAsC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oBAAoB,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAA
G,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,kCAAkC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,YAAY,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gBAAgB,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qBAAqB,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2CAA2C,EAAC,MAAM,EAAC,4DAA4D,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,2CAA2C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,kCAAkC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6CAA6C,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qCAAqC,EAAC,MAAM,EAAC,sCAAsC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,mCAAmC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,4CAA4C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,qCA
AqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2CAA2C,EAAC,MAAM,EAAC,8CAA8C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iCAAiC,EAAC,MAAM,EAAC,kCAAkC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,SAAS,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2CAA2C,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,OAAO,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,eAAe,EAAC,MAAM,EAAC,gBAAgB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,UAAU,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,4CAA4C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qCAAqC,EAAC,MAAM,EAAC,kCAAkC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yCAAyC,EAAC,MAAM,EAAC,sCAAsC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,cAAc,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sCAAsC,EAAC,MAAM,EAAC,mCAAmC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iCAAiC,EAAC,MAAM,EAAC,4BAA4B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uCAAuC,EAAC,MAAM,EAAC,oCAAoC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,
EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,kBAAkB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,WAAW,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oBAAoB,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,SAAS,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,mBAAmB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gBAAgB,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kBAAkB,EAAC,MAAM,EAAC,mBAAmB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,4CAA4C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,2CAA2C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,OAAO,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,OAAO,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,6CAA6C,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,4BAA4B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,qCAAqC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EA
AC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6CAA6C,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,cAAc,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,gBAAgB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,mCAAmC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4CAA4C,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mBAAmB,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,QAAQ,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iCAAiC,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uCAAuC,EAAC,MAAM,EAAC,wCAAwC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,iBAAiB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,cAAc,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,uCAAuC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oBAAoB,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,gCAAgC,EAAC,MAAM,EAAC,UAAU,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,WAAW,EAAC,OAA
O,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,mBAAmB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,yBAAyB,EAAC,MAAM,EAAC,oBAAoB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mBAAmB,EAAC,MAAM,EAAC,YAAY,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,cAAc,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iBAAiB,EAAC,MAAM,EAAC,+BAA+B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,IAAI,EAAC,YAAY,EAAC,IAAI,EAAC,SAAS,EAAC,IAAI,EAAC,OAAO,EAAC,IAAI,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,mCAAmC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,yBAAyB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kCAAkC,EAAC,MAAM,EAAC,8BAA8B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,4BAA4B,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qBAAqB,EAAC,MAAM,EAAC,UAAU,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,+BAA+B,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,oCAAoC,EAAC,MAAM,EAAC,mCAAmC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,kBAAkB,EAAC,MAAM,EAAC,mBAAmB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,uBAAuB,EAAC,MAAM,EAAC,iCAAiC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,qCAAqC,EAAC,MAAM,EAAC,kBAAkB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,YAAY,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,eAAe,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,YAAY,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,gCAAgC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BA
A2B,EAAC,MAAM,EAAC,yCAAyC,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,gBAAgB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mCAAmC,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,0BAA0B,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,0BAA0B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,UAAU,EAAC,YAAY,EAAC,UAAU,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,qBAAqB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,GAAG,EAAC,YAAY,EAAC,GAAG,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,wBAAwB,EAAC,MAAM,EAAC,cAAc,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,8BAA8B,EAAC,MAAM,EAAC,sBAAsB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,iCAAiC,EAAC,MAAM,EAAC,yBAAyB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,6BAA6B,EAAC,MAAM,EAAC,wBAAwB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,aAAa,EAAC,YAAY,EAAC,aAAa,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,sBAAsB,EAAC,MAAM,EAAC,uBAAuB,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,2BAA2B,EAAC,MAAM,EAAC,2BAA2B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,WAAW,EAAC,YAAY,EAAC,WAAW,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,cAAc,EAAC,MAAM,EAAC,eAAe,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,EAAC,EAAC,IAAI,EAAC,mBAAmB,EAAC,MAAM,EAAC,6BAA6B,EAAC,SAAS,EAAC,EAAC,QAAQ,EAAC,SAAS,EAAC,YAAY,EAAC,SAAS,EAAC,OAAO,EAAC,GAAG,EAAC,SAAS,EAAC,GAAG,EAAC,EAAC,SAAS,EAAC,UAAU,EAAC,CAAC,CAAA"} @@ -211849,7 +213389,7 @@ const models_all = () => { } return models; }; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbW9kZWxzL2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxNQUFNLE9BQU8sQ0FBQTtBQUN6QixPQUFPLEtBQUssSUFBSSxNQUFNLFdBQVcsQ0FBQTtBQUNqQyxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFcEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxXQUFXLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDakQsT0FBTyxFQUFFLFVBQVUsSUFBSSxxQkFBcUIsRUFBMEMscUJBQXFCLEVBQUUsbUJBQW1CLElBQUksNkJBQTZCLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMxTCxPQUFPLEVBQUUsVUFBVSxJQUFJLGlCQUFpQixFQUFzQyxtQkFBbUIsSUFBSSx5QkFBeUIsRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUVuSixPQUFPLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUN2RCxPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sSUFBSSxZQUFZLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMxRCxPQUFPLEVBQUUsTUFBTSxJQUFJLGdCQUFnQixFQUFFLE1BQU0sdUJBQXVCLENBQUE7QUFFbEUsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRTtJQUU5QixJQUFJLFNBQVMsR0FBRyxnQkFBZ0IsQ0FBQTtJQUNoQyxJQUFJLFVBQVUsR0FBRyxZQUFZLENBQUE7SUFDN0IsSUFBSSxlQUFlLEdBQUc7UUFDcEI7WUFDRSxJQUFJLEVBQUUsZUFBZTtZQUNyQixNQUFNLEVBQUUsZUFBZTtTQUN4QjtRQUNEO1lBQ0UsSUFBSSxFQUFFLG1CQUFtQjtZQUN6QixNQUFNLEVBQUUsbUJBQW1CO1NBQzVCO0tBQ0YsQ0FBQTtJQUVELE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxXQUFXLEVBQUUsRUFBRSxhQUFhLENBQUMsQ0FBQTtJQUNuRSxJQUFJLE1BQU0sQ0FBQyxnQkFBZ0IsQ0FBQyxFQUFFLENBQUM7UUFDN0IsVUFBVSxHQUFHLElBQUksQ0FBQyxnQkFBZ0IsRUFBRSxNQUFNLENBQVEsQ0FBQTtJQUNwRCxDQUFDO0lBRUQsTUFBTSxnQkFBZ0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFdBQVcsRUFBRSxFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDdkUsSUFBSSxNQUFNLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDO1FBQzdCLFNBQVMsR0FBRyxJQUFJLENBQUMsZ0JBQWdCLEVBQUUsTUFBTSxDQUFRLENBQUE7SUFDbkQsQ0FBQztJQUNELE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLHlCQUF5QixDQUFDLENBQUMsQ0FBQTtJQUMxRCxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsNkJBQTZCLENBQUMsU0FBZ0IsQ0FBQyxDQUFDLENBQUE7SUFFL0QsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7SUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFVBQWlCLENBQUMsQ0FBQyxDQUFBO0lBQzVELE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFFdEIsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyx1QkFBdUIsQ0FBQyxDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLGVBQXNCLENBQUMsQ0FBQyxDQUFBO0lBQ2pFLE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFDdEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFJRCxNQUFNLENBQUMsTUFBTSxNQUFNLEdBQUcsR0FBRyxFQUFFO0lBQ3pCLE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLGNBQWMsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLHFCQUFxQixDQUFDLENBQUE7SUFDMUQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxjQUFjLENBQUMsRUFBRSxDQUFDO1FBQzVCLHFCQUFxQixFQUFFLENBQUE7SUFDekIsQ0FBQztJQUNELElBQUksTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDM0IsTUFBTSxTQUFTLEdBQTJCLElBQUksQ0FBQyxjQUFjLEVBQUUsTUFBTSxDQUEyQixDQUFBO1FBQ2hHLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMseUJBQXlCLENBQUMsQ0FBQyxDQUFBO1FBQzFELE1BQU0sQ0FBQyxJQUFJLENBQUMsR0FBRyw2QkFBNkIsQ0FBQyxTQUFTLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtJQUNqRSxDQUFDO0lBQ0QsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxpQkFBaUIsQ0FBQyxDQUFBO0lBRTVELE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxjQUFjLEVBQVMsQ0FBQTtJQUN0QyxJQUFJLENBQUMsTUFBTSxDQUFDLFVBQVUsQ0FBQyxJQUFJLE1BQU0sRUFBRSxNQUFNLEVBQUUsR0FBRyxFQUFFLENBQUM7UUFDL0MsaUJBQWlCLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQTtJQUN0QyxDQ
UFDO0lBRUQsSUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUN2QixNQUFNLFNBQVMsR0FBdUIsSUFBSSxDQUFDLFVBQVUsRUFBRSxNQUFNLENBQXVCLENBQUE7UUFDcEYsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7UUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFBO0lBQzdELENBQUM7SUFDRCxNQUFNLENBQUMsS0FBSyxDQUFDLHVCQUF1QixFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxTQUFTLENBQUMsQ0FBQTtJQUN0QixPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEdBQUcsR0FBRyxHQUFHLEVBQUU7SUFDdEIsSUFBSSxNQUFNLEdBQVUsRUFBRSxDQUFBO0lBQ3RCLE1BQU0sY0FBYyxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMscUJBQXFCLENBQUMsQ0FBQTtJQUMxRCxJQUFJLENBQUMsTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDNUIscUJBQXFCLEVBQUUsQ0FBQTtJQUN6QixDQUFDO0lBQ0QsSUFBSSxNQUFNLENBQUMsY0FBYyxDQUFDLEVBQUUsQ0FBQztRQUMzQixNQUFNLFNBQVMsR0FBMkIsSUFBSSxDQUFDLGNBQWMsRUFBRSxNQUFNLENBQTJCLENBQUE7UUFDaEcsTUFBTSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBQzFDLENBQUM7SUFDRCxNQUFNLFVBQVUsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLGlCQUFpQixDQUFDLENBQUE7SUFDbEQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFDdEMsSUFBSSxDQUFDLE1BQU0sQ0FBQyxVQUFVLENBQUMsSUFBSSxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUFDO1FBQy9DLGlCQUFpQixDQUFDLE1BQU0sQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUE7SUFDdEMsQ0FBQztJQUVELElBQUksTUFBTSxDQUFDLFVBQVUsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxTQUFTLEdBQXVCLElBQUksQ0FBQyxVQUFVLEVBQUUsTUFBTSxDQUF1QixDQUFBO1FBQ3BGLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMscUJBQXFCLENBQUMsQ0FBQyxDQUFBO1FBQ3RELE1BQU0sR0FBRyxNQUFNLENBQUMsTUFBTSxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMxQyxDQUFDO0lBQ0QsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUEifQ== +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbW9kZWxzL2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxNQUFNLE9BQU8sQ0FBQTtBQUN6QixPQUFPLEtBQUssSUFBSSxNQUFNLFdBQVcsQ0FBQTtBQUNqQyxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFcEQsT0FBTyxFQUFFLE1BQU0sRUFBRSxXQUFXLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDakQsT0FBTyxFQUFFLFVBQVUsSUFBSSxxQkFBcUIsRUFBMEMscUJBQXFCLEVBQUUsbUJBQW1CLElBQUksNkJBQTZCLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMxTCxPQUFPLEVBQUUsVUFBVSxJQUFJLGlCQUFpQixFQUFzQyxtQkFBbUIsSUFBSSx5QkFBeUIsRUFBRSxNQUFNLGFBQWEsQ0FBQTtBQUVuSixPQUFPLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUN2RCxPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFFbEQsT0FBTyxFQUFFLE1BQU0sSUFBSSxZQUFZLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMxRCxPQUFPLEVBQUUsTUFBTSxJQUFJLGdCQUFnQixFQUFFLE1BQU0sdUJBQXVCLENBQUE7QUFFbEUsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRTtJQUU5QixJQUFJLFNBQVMsR0FBRyxnQkFBZ0IsQ0FBQTtJQUNoQyxJQUFJLFVBQVUsR0FBRyxZQUFZLENBQUE7SUFDN0IsSUFBSSxlQUFlLEdBQUc7UUFDcEI7WUFDRSxJQUFJLEVBQUUsZUFBZTtZQUNyQixNQUFNLEVBQUUsZUFBZTtTQUN4QjtRQUNEO1lBQ0UsSUFBSSxFQUFFLG1CQUFtQjtZQUN6QixNQUFNLEVBQUUsbUJBQW1CO1NBQzVCO0tBQ0YsQ0FBQTtJQUVELE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxXQUFXLEVBQUUsRUFBRSxhQUFhLENBQUMsQ0FBQTtJQUNuRSxJQUFJLE1BQU0sQ0FBQyxnQkFBZ0IsQ0FBQyxFQUFFLENBQUM7UUFDN0IsVUFBVSxHQUFHLElBQUksQ0FBQyxnQkFBZ0IsRUFBRSxNQUFNLENBQVEsQ0FBQTtJQUNwRCxDQUFDO0lBRUQsTUFBTSxnQkFBZ0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFdBQVcsRUFBRSxFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDdkUsSUFBSSxNQUFNLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDO1FBQzdCLFNBQVMsR0FBRyxJQUFJLENBQUMsZ0JBQWdCLEVBQUUsTUFBTSxDQUFRLENBQUE7SUFDbkQsQ0FBQztJQUNELE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJ
QUMzQixNQUFNLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLHlCQUF5QixDQUFDLENBQUMsQ0FBQTtJQUMxRCxNQUFNLENBQUMsSUFBSSxDQUFDLEdBQUcsNkJBQTZCLENBQUMsU0FBZ0IsQ0FBQyxDQUFDLENBQUE7SUFFL0QsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7SUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFVBQWlCLENBQUMsQ0FBQyxDQUFBO0lBQzVELE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFFdEIsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyx1QkFBdUIsQ0FBQyxDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLGVBQXNCLENBQUMsQ0FBQyxDQUFBO0lBQ2pFLE1BQU0sQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUE7SUFDdEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsTUFBTSxNQUFNLEdBQUcsR0FBRyxFQUFFO0lBQ3pCLE1BQU0sTUFBTSxHQUFhLEVBQUUsQ0FBQTtJQUMzQixNQUFNLGNBQWMsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLHFCQUFxQixDQUFDLENBQUE7SUFDMUQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxjQUFjLENBQUMsRUFBRSxDQUFDO1FBQzVCLHFCQUFxQixFQUFFLENBQUE7SUFDekIsQ0FBQztJQUNELElBQUksTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDM0IsTUFBTSxTQUFTLEdBQTJCLElBQUksQ0FBQyxjQUFjLEVBQUUsTUFBTSxDQUEyQixDQUFBO1FBQ2hHLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMseUJBQXlCLENBQUMsQ0FBQyxDQUFBO1FBQzFELE1BQU0sQ0FBQyxJQUFJLENBQUMsR0FBRyw2QkFBNkIsQ0FBQyxTQUFTLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQTtJQUNqRSxDQUFDO0lBQ0QsTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsRUFBRSxpQkFBaUIsQ0FBQyxDQUFBO0lBRTVELE1BQU0sVUFBVSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxjQUFjLEVBQVMsQ0FBQTtJQUN0QyxJQUFJLENBQUMsTUFBTSxDQUFDLFVBQVUsQ0FBQyxJQUFJLE1BQU0sRUFBRSxNQUFNLEVBQUUsR0FBRyxFQUFFLENBQUM7UUFDL0MsaUJBQWlCLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQTtJQUN0QyxDQUFDO0lBRUQsSUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUN2QixNQUFNLFNBQVMsR0FBdUIsSUFBSSxDQUFDLFVBQVUsRUFBRSxNQUFNLENBQXVCLENBQUE7UUFDcEYsTUFBTSxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxxQkFBcUIsQ0FBQyxDQUFDLENBQUE7UUFDdEQsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLHlCQUF5QixDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFBO0lBQzdELENBQUM7SUFDRCxNQUFNLENBQUMsS0FBSyxDQUFDLHVCQUF1QixFQUFFLGlCQUFpQixDQUFDLENBQUE7SUFDeEQsTUFBTSxDQUFDLElBQUksQ0FBQyxTQUFTLENBQUMsQ0FBQTtJQUN0QixPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEdBQUcsR0FBRyxHQUFHLEVBQUU7SUFDdEIsSUFBSSxNQUFNLEdBQVUsRUFBRSxDQUFBO0lBQ3RCLE1BQU0sY0FBYyxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMscUJBQXFCLENBQUMsQ0FBQTtJQUMxRCxJQUFJLENBQUMsTUFBTSxDQUFDLGNBQWMsQ0FBQyxFQUFFLENBQUM7UUFDNUIscUJBQXFCLEVBQUUsQ0FBQTtJQUN6QixDQUFDO0lBQ0QsSUFBSSxNQUFNLENBQUMsY0FBYyxDQUFDLEVBQUUsQ0FBQztRQUMzQixNQUFNLFNBQVMsR0FBMkIsSUFBSSxDQUFDLGNBQWMsRUFBRSxNQUFNLENBQTJCLENBQUE7UUFDaEcsTUFBTSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBQzFDLENBQUM7SUFDRCxNQUFNLFVBQVUsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLGlCQUFpQixDQUFDLENBQUE7SUFDbEQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFDdEMsSUFBSSxDQUFDLE1BQU0sQ0FBQyxVQUFVLENBQUMsSUFBSSxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUFDO1FBQy9DLGlCQUFpQixDQUFDLE1BQU0sQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLENBQUE7SUFDdEMsQ0FBQztJQUVELElBQUksTUFBTSxDQUFDLFVBQVUsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxTQUFTLEdBQXVCLElBQUksQ0FBQyxVQUFVLEVBQUUsTUFBTSxDQUF1QixDQUFBO1FBQ3BGLE1BQU0sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMscUJBQXFCLENBQUMsQ0FBQyxDQUFBO1FBQ3RELE1BQU0sR0FBRyxNQUFNLENBQUMsTUFBTSxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQTtJQUMxQyxDQUFDO0lBQ0QsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUEifQ== ;// ./dist-in/commands/run.js @@ -211922,7 +213462,7 @@ const processRun = async (opts) => { const 
logDir = external_node_path_namespaceObject.resolve(variables_resolve(opts.logs)); const paramsPath = external_node_path_namespaceObject.join(logDir, 'params.json'); write_sync(paramsPath, JSON.stringify({ ...params }, null, 2)); - dist_in_logger.debug(`Read ${files.length} files from project ${external_node_path_namespaceObject.resolve(options.path)} with ${options.include}`, files.map(f => f.path), options.variables, params.tools.map(t => `${t.function.name} : ${t.function.description}`)); + dist_in_logger.debug(`Read ${files.length} files from project ${external_node_path_namespaceObject.resolve(options.path)} with ${options.include}`, files.map(f => f.path), params.tools.map(t => `${t.function.name} : ${t.function.description}`)); let ret = null; try { switch (options.mode) { @@ -212046,7 +213586,432 @@ const run = async (opts) => { } return ret; }; -//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"run.js","sourceRoot":"","sources":["../../src/commands/run.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,WAAW,CAAA;AACjC,OAAO,EAAE,QAAQ,EAAE,MAAM,MAAM,CAAA;AAC/B,OAAO,EAAE,IAAI,IAAI,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAC9C,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAA;AACpD,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,IAAI,IAAI,IAAI,EAAE,MAAM,mBAAmB,CAAA;AAChD,OAAO,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAC9E,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AAM7D,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAA;AAC3C,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAA;AAChD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAA;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,IAAI,IAAI,WAAW,EAAE,MAAM,eAAe,CAAA;AACnD,OAAO,EAAE,IAAI,IAAI,SAAS,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAA;AAExC,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAA;AACjD,OAAO,EAAE,GAAG,EAAE,MAAM,oBAAoB,CAAA;AAExC,MAAM,CAAC,MAAM,UAAU,GAAG,KAAK,EAAE,IAAe,EAAE,EAAE;IAClD,IAAI,OAAO,GAAc,IAAI,CAAA;IAC7B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,CAAA;IACrD,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC;QACpB,GAAG,CAAC,MAAM,CAAC,CAAA;IACb,CAAC;IACD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;IACpC,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;IAC9C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;IACpC,IAAI,CAAC,SAAS,GAAG,MAAM,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,IAAI,CAAC;QACH,OAAO,GAAG,aAAa,EAAE,CAAC,KAAK,CAAC,IAAI,CAAQ,CAAA;IAC9C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,CAAC,KAAK,CAAC,0BAA0B,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAA;QACrE,OAAM;IACR,CAAC;IAED,MAAM,MAAM,GAAG,YAAY,CAAC,OAAO,CAAC,CAAA;IACpC,OAAO,CAAC,SAAS,GAAG,EAAE,GAAG,OAAO,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,OAAO,CAAC,EAAE,CAAA;IAEnE,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,MAAM,CAAC,KAAK,CAAC,yBAAyB,CAAC,CAAA;QACvC,OAAM;IACR,CAAC;IACD,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;IACvB,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAE9C,IAAI,QAAQ,GAAsC,EAAE,CAAA;IAEpD,MAAM,aAAa,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,CAAA;IACxC,IAAG,CAAC,aAAa,CAAC,OAAO,EAAC,CAAC;QACzB,OAAO,EAAE,CAAA;IACX,CAAC;IACD,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;IAC5B,QAAQ,CAAC,IAAI,CAAC,MAAM,WAAW,CAAC,IAAI,CAAC,CAAC,CAAA;IAEtC,IAAI,KAAK,GAAG,MAAM,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,CAAA;IACxE,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,E
AAE,GAAG,OAAO,EAAE,GAAG,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAA,CAAC,CAAC,CAAC,CAAA;IACzD,QAAQ,GAAG,CAAC,GAAG,QAAe,EAAE,GAAG,KAAK,CAAC,CAAA;IAEzC,MAAM,MAAM,GAAG;QACb,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,QAAQ;QACR,KAAK,EAAE,EAAE;KAC6B,CAAA;IAExC,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,SAAS,EAAE,CAAC;QACrE,MAAM,CAAC,KAAK,GAAG,MAAM,SAAS,CAAC,OAAO,CAAC,CAAA;QACvC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAA;QAC3B,MAAM,CAAC,mBAAmB,GAAG,KAAK,CAAA;IACpC,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,CAAA;IACnD,KAAK,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAA;IACzD,MAAM,CAAC,KAAK,CAAC,QAAQ,KAAK,CAAC,MAAM,uBAAuB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,OAAO,CAAC,OAAO,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC,CAAA;IACjO,IAAI,GAAG,GAAG,IAAI,CAAA;IACd,IAAI,CAAC;QACH,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;YACrB,KAAK,KAAK,CAAC,UAAU;gBACnB,GAAG,GAAG,MAAM,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBAClD,MAAK;YAEP,KAAK,KAAK,CAAC,KAAK;gBACd,GAAG,GAAG,MAAM,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBAC7C,MAAK;YAEP,KAAK,KAAK,CAAC,SAAS;gBAClB,GAAG,GAAG,MAAM,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBACjD,MAAK;YAEP;gBACE,MAAM,IAAI,KAAK,CAAC,qBAAqB,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;QACxD,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CAAC,iBAAiB,OAAO,CAAC,IAAI,UAAU,CAAC,CAAC,OAAO,EAAE,CAAC,CAAA;IAClE,CAAC;IACD,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,GAAG,CAAA;IAC5B,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA;AACD;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,KAAa;IAEjC,IAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAC,CAAC;QACvC,OAAO,CAAC,KAAK,CAAC,CAAA;IAChB,CAAC;IAED,4EAA4E;IAC5E,6DAA6D;IAC7D,MAAM,cAAc,GAAG,mBAAmB,CAAC;IAE3C,MAAM,YAAY,GAAa,EAAE,CAAC;IAClC,IAAI,KAA6B,CAAC;IAElC,uCAAuC;IACvC,OAAO,CAAC,KAAK,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,IAAI,EAAE,CAAC;QACrD,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED,8BAA8B;IAC9B,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,CAAC;QACzB,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,gDAAgD;IAChD,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7C,MAAM,KAAK,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;QAC9B,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;QAC7E,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC;QAChD,IAAI,IAAI,EAAE,CAAC;YACT,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,aAAa,CAAI,MAAa;IACrC,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,EAAE;QAC5C,OAAO,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACrC,CAAC,EAAE,EAAS,CAAC,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,EAAE,IAAe,EAAE,EAAE;IAC3C,MAAM,GAAG,GAAG,EAAE,CAAA;IACd,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;QACjB,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;YAC3B,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;QAC/B,CAAC;QACD,IAAI,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;YAC1B,IAAI,CAAC,OAAO,GAAG,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAA;QAC9D,CAAC;QACD,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAA;IAClD,CAAC;SAAI,CAAC;QACJ,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IACD,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;QACd,IAAI,KAAK,GAAa,EAAE,CAAA;QACxB,IAAI,O
AAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YACvB,KAAK,GAAG,IAAI,CAAC,IAAI,CAAA;QACnB,CAAC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,OAAO,EAAE,CAAC;YAC3F,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAO,IAAI,EAAE,CAAA;QAC7C,CAAC;aAAM,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;YACxE,KAAK,GAAG,IAAI,CAAC,KAAK,CAAA;QACpB,CAAC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAClD,KAAK,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACrB,CAAC;aAAM,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAC/B,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAC9B,CAAC;QACD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACvB,MAAM,CAAC,IAAI,CAAC,6BAA6B,IAAI,CAAC,IAAI,aAAa,IAAI,CAAC,IAAI,EAAE,CAAC,CAAA;YAC3E,OAAO,GAAG,CAAA;QACZ,CAAC;QACD,MAAM,CAAC,IAAI,CAAC,cAAc,KAAK,CAAC,MAAM,2BAA2B,IAAI,CAAC,IAAI,KAAK,CAAC,CAAA;QAChF,MAAM,OAAO,GAAG,GAAG,EAAE,CAAA;QACrB,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,MAAM,QAAQ,GAAG;gBACf,GAAG,IAAI;gBACP,IAAI,EAAE,IAAI;gBACV,SAAS,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;aAC1B,CAAA;YACD,sCAAsC;YACtC,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC,CAAA;YAC9C,IAAI,KAAK,EAAE,CAAC;gBACV,QAAQ,CAAC,KAAK,GAAG,IAAI,CAAA;YACvB,CAAC;YACD,QAAQ,CAAC,OAAO,GAAG,CAAC,GAAG,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAC9D,GAAG,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAA;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,GAAG,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,IAAI,CAAC,CAAC,CAAA;IAClC,CAAC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA"} +//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"run.js","sourceRoot":"","sources":["../../src/commands/run.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,WAAW,CAAA;AACjC,OAAO,EAAE,QAAQ,EAAE,MAAM,MAAM,CAAA;AAC/B,OAAO,EAAE,IAAI,IAAI,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAC9C,OAAO,EAAE,IAAI,IAAI,MAAM,EAAE,MAAM,qBAAqB,CAAA;AACpD,OAAO,EAAE,IAAI,IAAI,KAAK,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,IAAI,IAAI,IAAI,EAAE,MAAM,mBAAmB,CAAA;AAChD,OAAO,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAC9E,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AAM7D,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAA;AAC3C,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAA;AAChD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAA;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,IAAI,IAAI,WAAW,EAAE,MAAM,eAAe,CAAA;AACnD,OAAO,EAAE,IAAI,IAAI,SAAS,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAA;AAExC,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAA;AACjD,OAAO,EAAE,GAAG,EAAE,MAAM,oBAAoB,CAAA;AAExC,MAAM,CAAC,MAAM,UAAU,GAAG,KAAK,EAAE,IAAe,EAAE,EAAE;IAClD,IAAI,OAAO,GAAc,IAAI,CAAA;IAC7B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,CAAA;IACrD,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC;QACpB,GAAG,CAAC,MAAM,CAAC,CAAA;IACb,CAAC;IACD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;IACpC,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;IAC9C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;IACpC,IAAI,CAAC,SAAS,GAAG,MAAM,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,IAAI,CAAC;QACH,OAAO,GAAG,aAAa,EAAE,CAAC,KAAK,CAAC,IAAI,CAAQ,CAAA;IAC9C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,CAAC,
KAAK,CAAC,0BAA0B,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAA;QACrE,OAAM;IACR,CAAC;IAED,MAAM,MAAM,GAAG,YAAY,CAAC,OAAO,CAAC,CAAA;IACpC,OAAO,CAAC,SAAS,GAAG,EAAE,GAAG,OAAO,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,OAAO,CAAC,EAAE,CAAA;IAEnE,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,MAAM,CAAC,KAAK,CAAC,yBAAyB,CAAC,CAAA;QACvC,OAAM;IACR,CAAC;IACD,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;IACvB,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAE9C,IAAI,QAAQ,GAAsC,EAAE,CAAA;IAEpD,MAAM,aAAa,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,CAAA;IACxC,IAAG,CAAC,aAAa,CAAC,OAAO,EAAC,CAAC;QACzB,OAAO,EAAE,CAAA;IACX,CAAC;IACD,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;IAC5B,QAAQ,CAAC,IAAI,CAAC,MAAM,WAAW,CAAC,IAAI,CAAC,CAAC,CAAA;IAEtC,IAAI,KAAK,GAAG,MAAM,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,CAAA;IACxE,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,OAAO,EAAE,GAAG,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAA,CAAC,CAAC,CAAC,CAAA;IACzD,QAAQ,GAAG,CAAC,GAAG,QAAe,EAAE,GAAG,KAAK,CAAC,CAAA;IAEzC,MAAM,MAAM,GAAG;QACb,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,QAAQ;QACR,KAAK,EAAE,EAAE;KAC6B,CAAA;IAExC,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,KAAK,KAAK,CAAC,SAAS,EAAE,CAAC;QACrE,MAAM,CAAC,KAAK,GAAG,MAAM,SAAS,CAAC,OAAO,CAAC,CAAA;QACvC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAA;QAC3B,MAAM,CAAC,mBAAmB,GAAG,KAAK,CAAA;IACpC,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,CAAA;IACnD,KAAK,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAA;IACzD,MAAM,CAAC,KAAK,CAAC,QAAQ,KAAK,CAAC,MAAM,uBAAuB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,OAAO,CAAC,OAAO,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC,CAAA;IAC9M,IAAI,GAAG,GAAG,IAAI,CAAA;IACd,IAAI,CAAC;QACH,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;YACrB,KAAK,KAAK,CAAC,UAAU;gBACnB,GAAG,GAAG,MAAM,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBAClD,MAAK;YAEP,KAAK,KAAK,CAAC,KAAK;gBACd,GAAG,GAAG,MAAM,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBAC7C,MAAK;YAEP,KAAK,KAAK,CAAC,SAAS;gBAClB,GAAG,GAAG,MAAM,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;gBACjD,MAAK;YAEP;gBACE,MAAM,IAAI,KAAK,CAAC,qBAAqB,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;QACxD,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CAAC,iBAAiB,OAAO,CAAC,IAAI,UAAU,CAAC,CAAC,OAAO,EAAE,CAAC,CAAA;IAClE,CAAC;IACD,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,GAAG,CAAA;IAC5B,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA;AACD;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,KAAa;IAEjC,IAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAC,CAAC;QACvC,OAAO,CAAC,KAAK,CAAC,CAAA;IAChB,CAAC;IAED,4EAA4E;IAC5E,6DAA6D;IAC7D,MAAM,cAAc,GAAG,mBAAmB,CAAC;IAE3C,MAAM,YAAY,GAAa,EAAE,CAAC;IAClC,IAAI,KAA6B,CAAC;IAElC,uCAAuC;IACvC,OAAO,CAAC,KAAK,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,IAAI,EAAE,CAAC;QACrD,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED,8BAA8B;IAC9B,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,CAAC;QACzB,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,gDAAgD;IAChD,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7C,MAAM,KAAK,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;QAC9B,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;QAC7E,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC;QAChD,IAAI,IAAI,EAAE,CAAC;YACT,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IAED,OAAO,K
AAK,CAAC;AACf,CAAC;AAED,SAAS,aAAa,CAAI,MAAa;IACrC,OAAO,MAAM,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,EAAE;QAC5C,OAAO,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACrC,CAAC,EAAE,EAAS,CAAC,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,EAAE,IAAe,EAAE,EAAE;IAC3C,MAAM,GAAG,GAAG,EAAE,CAAA;IACd,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;QACjB,IAAI,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;YAC3B,IAAI,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;QAC/B,CAAC;QACD,IAAI,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;YAC1B,IAAI,CAAC,OAAO,GAAG,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAA;QAC9D,CAAC;QACD,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAA;IAClD,CAAC;SAAI,CAAC;QACJ,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IACD,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;QACd,IAAI,KAAK,GAAa,EAAE,CAAA;QACxB,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YACvB,KAAK,GAAG,IAAI,CAAC,IAAI,CAAA;QACnB,CAAC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,OAAO,EAAE,CAAC;YAC3F,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAO,IAAI,EAAE,CAAA;QAC7C,CAAC;aAAM,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;YACxE,KAAK,GAAG,IAAI,CAAC,KAAK,CAAA;QACpB,CAAC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAClD,KAAK,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACrB,CAAC;aAAM,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;YAC/B,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAC9B,CAAC;QACD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACvB,MAAM,CAAC,IAAI,CAAC,6BAA6B,IAAI,CAAC,IAAI,aAAa,IAAI,CAAC,IAAI,EAAE,CAAC,CAAA;YAC3E,OAAO,GAAG,CAAA;QACZ,CAAC;QACD,MAAM,CAAC,IAAI,CAAC,cAAc,KAAK,CAAC,MAAM,2BAA2B,IAAI,CAAC,IAAI,KAAK,CAAC,CAAA;QAChF,MAAM,OAAO,GAAG,GAAG,EAAE,CAAA;QACrB,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,MAAM,QAAQ,GAAG;gBACf,GAAG,IAAI;gBACP,IAAI,EAAE,IAAI;gBACV,SAAS,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;aAC1B,CAAA;YACD,sCAAsC;YACtC,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC,CAAA;YAC9C,IAAI,KAAK,EAAE,CAAC;gBACV,QAAQ,CAAC,KAAK,GAAG,IAAI,CAAA;YACvB,CAAC;YACD,QAAQ,CAAC,OAAO,GAAG,CAAC,GAAG,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAC9D,GAAG,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAA;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,GAAG,CAAC,IAAI,CAAC,MAAM,UAAU,CAAC,IAAI,CAAC,CAAC,CAAA;IAClC,CAAC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA"} +;// ./dist-in/models/cache/openai-models.js +var E_OPENAI_MODEL; +(function (E_OPENAI_MODEL) { + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2024_12_17"] = "gpt-4o-realtime-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2024_12_17"] = "gpt-4o-audio-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_DALL_E_3"] = "dall-e-3"; + E_OPENAI_MODEL["MODEL_DALL_E_2"] = "dall-e-2"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2024_10_01"] = "gpt-4o-audio-preview-2024-10-01"; + E_OPENAI_MODEL["MODEL_O3_MINI"] = "o3-mini"; + E_OPENAI_MODEL["MODEL_O3_MINI_2025_01_31"] = "o3-mini-2025-01-31"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_REALTIME_PREVIEW_2024_12_17"] = "gpt-4o-mini-realtime-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_REALTIME_PREVIEW"] = "gpt-4o-mini-realtime-preview"; + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01"] = "gpt-4o-realtime-preview-2024-10-01"; + E_OPENAI_MODEL["MODEL_GPT_4O_TRANSCRIBE"] = "gpt-4o-transcribe"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_TRANSCRIBE"] = 
"gpt-4o-mini-transcribe"; + E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW"] = "gpt-4o-realtime-preview"; + E_OPENAI_MODEL["MODEL_BABBAGE_002"] = "babbage-002"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_TTS"] = "gpt-4o-mini-tts"; + E_OPENAI_MODEL["MODEL_TTS_1_HD_1106"] = "tts-1-hd-1106"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_3_LARGE"] = "text-embedding-3-large"; + E_OPENAI_MODEL["MODEL_GPT_4"] = "gpt-4"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_ADA_002"] = "text-embedding-ada-002"; + E_OPENAI_MODEL["MODEL_TTS_1_HD"] = "tts-1-hd"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_AUDIO_PREVIEW"] = "gpt-4o-mini-audio-preview"; + E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW"] = "gpt-4o-audio-preview"; + E_OPENAI_MODEL["MODEL_O1_PREVIEW_2024_09_12"] = "o1-preview-2024-09-12"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT_0914"] = "gpt-3.5-turbo-instruct-0914"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_SEARCH_PREVIEW"] = "gpt-4o-mini-search-preview"; + E_OPENAI_MODEL["MODEL_TTS_1_1106"] = "tts-1-1106"; + E_OPENAI_MODEL["MODEL_DAVINCI_002"] = "davinci-002"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_1106"] = "gpt-3.5-turbo-1106"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO"] = "gpt-4-turbo"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT"] = "gpt-3.5-turbo-instruct"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO"] = "gpt-3.5-turbo"; + E_OPENAI_MODEL["MODEL_CHATGPT_4O_LATEST"] = "chatgpt-4o-latest"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11"] = "gpt-4o-mini-search-preview-2025-03-11"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_11_20"] = "gpt-4o-2024-11-20"; + E_OPENAI_MODEL["MODEL_WHISPER_1"] = "whisper-1"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_0125"] = "gpt-3.5-turbo-0125"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_05_13"] = "gpt-4o-2024-05-13"; + E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_16K"] = "gpt-3.5-turbo-16k"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO_2024_04_09"] = "gpt-4-turbo-2024-04-09"; + E_OPENAI_MODEL["MODEL_GPT_4_1106_PREVIEW"] = "gpt-4-1106-preview"; + E_OPENAI_MODEL["MODEL_O1_PREVIEW"] = "o1-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_0613"] = "gpt-4-0613"; + E_OPENAI_MODEL["MODEL_GPT_4O_SEARCH_PREVIEW"] = "gpt-4o-search-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_5_PREVIEW"] = "gpt-4.5-preview"; + E_OPENAI_MODEL["MODEL_GPT_4_5_PREVIEW_2025_02_27"] = "gpt-4.5-preview-2025-02-27"; + E_OPENAI_MODEL["MODEL_GPT_4O_SEARCH_PREVIEW_2025_03_11"] = "gpt-4o-search-preview-2025-03-11"; + E_OPENAI_MODEL["MODEL_OMNI_MODERATION_LATEST"] = "omni-moderation-latest"; + E_OPENAI_MODEL["MODEL_TTS_1"] = "tts-1"; + E_OPENAI_MODEL["MODEL_OMNI_MODERATION_2024_09_26"] = "omni-moderation-2024-09-26"; + E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_3_SMALL"] = "text-embedding-3-small"; + E_OPENAI_MODEL["MODEL_GPT_4O"] = "gpt-4o"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI"] = "gpt-4o-mini"; + E_OPENAI_MODEL["MODEL_GPT_4O_2024_08_06"] = "gpt-4o-2024-08-06"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_2024_07_18"] = "gpt-4o-mini-2024-07-18"; + E_OPENAI_MODEL["MODEL_GPT_4_TURBO_PREVIEW"] = "gpt-4-turbo-preview"; + E_OPENAI_MODEL["MODEL_O1_MINI"] = "o1-mini"; + E_OPENAI_MODEL["MODEL_GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17"] = "gpt-4o-mini-audio-preview-2024-12-17"; + E_OPENAI_MODEL["MODEL_O1_MINI_2024_09_12"] = "o1-mini-2024-09-12"; + E_OPENAI_MODEL["MODEL_GPT_4_0125_PREVIEW"] = "gpt-4-0125-preview"; + E_OPENAI_MODEL["MODEL_O1"] = "o1"; + E_OPENAI_MODEL["MODEL_O1_2024_12_17"] = "o1-2024-12-17"; + E_OPENAI_MODEL["MODEL_O1_PRO"] = "o1-pro"; + E_OPENAI_MODEL["MODEL_O1_PRO_2025_03_19"] = "o1-pro-2025-03-19"; +})(E_OPENAI_MODEL || (E_OPENAI_MODEL = {})); +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQWdFWDtBQWhFRCxXQUFZLGNBQWM7SUFDeEIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsMkZBQXlFLENBQUE7SUFDekUsMkNBQXlCLENBQUE7SUFDekIsaUVBQStDLENBQUE7SUFDL0MsMkdBQXlGLENBQUE7SUFDekYscUZBQW1FLENBQUE7SUFDbkUsaUdBQStFLENBQUE7SUFDL0UsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsMkVBQXlELENBQUE7SUFDekQsbURBQWlDLENBQUE7SUFDakMsMkRBQXlDLENBQUE7SUFDekMsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQsdUNBQXFCLENBQUE7SUFDckIseUVBQXVELENBQUE7SUFDdkQsNkNBQTJCLENBQUE7SUFDM0IsK0VBQTZELENBQUE7SUFDN0QscUVBQW1ELENBQUE7SUFDbkQsdUVBQXFELENBQUE7SUFDckQsbUZBQWlFLENBQUE7SUFDakUsaUZBQStELENBQUE7SUFDL0QsaURBQStCLENBQUE7SUFDL0IsbURBQWlDLENBQUE7SUFDakMsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsdURBQXFDLENBQUE7SUFDckMsK0RBQTZDLENBQUE7SUFDN0MsdUdBQXFGLENBQUE7SUFDckYsK0RBQTZDLENBQUE7SUFDN0MsK0NBQTZCLENBQUE7SUFDN0IsaUVBQStDLENBQUE7SUFDL0MsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsaURBQStCLENBQUE7SUFDL0IsaURBQStCLENBQUE7SUFDL0IsdUVBQXFELENBQUE7SUFDckQsMkRBQXlDLENBQUE7SUFDekMsaUZBQStELENBQUE7SUFDL0QsNkZBQTJFLENBQUE7SUFDM0UseUVBQXVELENBQUE7SUFDdkQsdUNBQXFCLENBQUE7SUFDckIsaUZBQStELENBQUE7SUFDL0QseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbUVBQWlELENBQUE7SUFDakQsMkNBQXlCLENBQUE7SUFDekIscUdBQW1GLENBQUE7SUFDbkYsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsaUNBQWUsQ0FBQTtJQUNmLHVEQUFxQyxDQUFBO0lBQ3JDLHlDQUF1QixDQUFBO0lBQ3ZCLCtEQUE2QyxDQUFBO0FBQy9DLENBQUMsRUFoRVcsY0FBYyxLQUFkLGNBQWMsUUFnRXpCIn0= +;// ./dist-in/models/cache/openrouter-models.js +var E_OPENROUTER_MODEL; +(function (E_OPENROUTER_MODEL) { + E_OPENROUTER_MODEL["MODEL_MISTRAL_MINISTRAL_8B"] = "mistral/ministral-8b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_V3_BASE_FREE"] = "deepseek/deepseek-v3-base:free"; + E_OPENROUTER_MODEL["MODEL_SCB10X_LLAMA3_1_TYPHOON2_8B_INSTRUCT"] = "scb10x/llama3.1-typhoon2-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT"] = "scb10x/llama3.1-typhoon2-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_MOLMO_7B_D_FREE"] = "allenai/molmo-7b-d:free"; + E_OPENROUTER_MODEL["MODEL_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE"] = "bytedance-research/ui-tars-72b:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE"] = "google/gemini-2.5-pro-exp-03-25:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324"] = "deepseek/deepseek-chat-v3-0324"; + E_OPENROUTER_MODEL["MODEL_FEATHERLESS_QWERKY_72B_FREE"] = "featherless/qwerky-72b:free"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PRO"] = "openai/o1-pro"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT"] = "mistralai/mistral-small-3.1-24b-instruct"; + E_OPENROUTER_MODEL["MODEL_OPEN_R1_OLYMPICCODER_7B_FREE"] = "open-r1/olympiccoder-7b:free"; + 
E_OPENROUTER_MODEL["MODEL_OPEN_R1_OLYMPICCODER_32B_FREE"] = "open-r1/olympiccoder-32b:free"; + E_OPENROUTER_MODEL["MODEL_STEELSKULL_L3_3_ELECTRA_R1_70B"] = "steelskull/l3.3-electra-r1-70b"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT"] = "allenai/olmo-2-0325-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_1B_IT_FREE"] = "google/gemma-3-1b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT"] = "google/gemma-3-4b-it"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_6_LARGE"] = "ai21/jamba-1.6-large"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_6_MINI"] = "ai21/jamba-1.6-mini"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT"] = "google/gemma-3-12b-it"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_A"] = "cohere/command-a"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW"] = "openai/gpt-4o-mini-search-preview"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW"] = "openai/gpt-4o-search-preview"; + E_OPENROUTER_MODEL["MODEL_TOKYOTECH_LLM_LLAMA_3_1_SWALLOW_70B_INSTRUCT_V0_3"] = "tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3"; + E_OPENROUTER_MODEL["MODEL_REKAAI_REKA_FLASH_3_FREE"] = "rekaai/reka-flash-3:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_27B_IT_FREE"] = "google/gemma-3-27b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_27B_IT"] = "google/gemma-3-27b-it"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1"] = "thedrummer/anubis-pro-105b-v1"; + E_OPENROUTER_MODEL["MODEL_LATITUDEGAMES_WAYFARER_LARGE_70B_LLAMA_3_3"] = "latitudegames/wayfarer-large-70b-llama-3.3"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_SKYFALL_36B_V2"] = "thedrummer/skyfall-36b-v2"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT"] = "microsoft/phi-4-multimodal-instruct"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_REASONING_PRO"] = "perplexity/sonar-reasoning-pro"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_PRO"] = "perplexity/sonar-pro"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_DEEP_RESEARCH"] = "perplexity/sonar-deep-research"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE"] = "deepseek/deepseek-r1-zero:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_FREE"] = "qwen/qwq-32b:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B"] = "qwen/qwq-32b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_32B_INSTRUCT"] = "qwen/qwen2.5-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE"] = "moonshotai/moonlight-16b-a3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE"] = "nousresearch/deephermes-3-llama-3-8b-preview:free"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_5_PREVIEW"] = "openai/gpt-4.5-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001"] = "google/gemini-2.0-flash-lite-001"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA"] = "anthropic/claude-3.7-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET"] = "anthropic/claude-3.7-sonnet"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING"] = "anthropic/claude-3.7-sonnet:thinking"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_R1_1776"] = "perplexity/r1-1776"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SABA"] = "mistralai/mistral-saba"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free"; + 
E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-mistral-24b:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_GUARD_3_8B"] = "meta-llama/llama-guard-3-8b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O3_MINI_HIGH"] = "openai/o3-mini-high"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B"] = "deepseek/deepseek-r1-distill-llama-8b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_001"] = "google/gemini-2.0-flash-001"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE"] = "google/gemini-2.0-pro-exp-02-05:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_VL_PLUS"] = "qwen/qwen-vl-plus"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_1_0"] = "aion-labs/aion-1.0"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_1_0_MINI"] = "aion-labs/aion-1.0-mini"; + E_OPENROUTER_MODEL["MODEL_AION_LABS_AION_RP_LLAMA_3_1_8B"] = "aion-labs/aion-rp-llama-3.1-8b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_VL_MAX"] = "qwen/qwen-vl-max"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_TURBO"] = "qwen/qwen-turbo"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-72b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT"] = "qwen/qwen2.5-vl-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS"] = "qwen/qwen-plus"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_MAX"] = "qwen/qwen-max"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O3_MINI"] = "openai/o3-mini"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B"] = "deepseek/deepseek-r1-distill-qwen-1.5b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE"] = "mistralai/mistral-small-24b-instruct-2501:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501"] = "mistralai/mistral-small-24b-instruct-2501"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE"] = "deepseek/deepseek-r1-distill-qwen-32b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B"] = "deepseek/deepseek-r1-distill-qwen-32b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE"] = "deepseek/deepseek-r1-distill-qwen-14b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B"] = "deepseek/deepseek-r1-distill-qwen-14b"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR_REASONING"] = "perplexity/sonar-reasoning"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_SONAR"] = "perplexity/sonar"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_7B"] = "liquid/lfm-7b"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_3B"] = "liquid/lfm-3b"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE"] = "deepseek/deepseek-r1-distill-llama-70b:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B"] = "deepseek/deepseek-r1-distill-llama-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE"] = "google/gemini-2.0-flash-thinking-exp:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1_FREE"] = "deepseek/deepseek-r1:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_R1"] = "deepseek/deepseek-r1"; + E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE"] = "sophosympatheia/rogue-rose-103b-v0.2:free"; + E_OPENROUTER_MODEL["MODEL_MINIMAX_MINIMAX_01"] = "minimax/minimax-01"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_CODESTRAL_2501"] = "mistralai/codestral-2501"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_4"] = "microsoft/phi-4"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_70B_HANAMI_X1"] = "sao10k/l3.1-70b-hanami-x1"; + 
E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_FREE"] = "deepseek/deepseek-chat:free"; + E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT"] = "deepseek/deepseek-chat"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE"] = "google/gemini-2.0-flash-thinking-exp-1219:free"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_3_EURYALE_70B"] = "sao10k/l3.3-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1"] = "openai/o1"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_LLAMA_3_33_70B"] = "eva-unit-01/eva-llama-3.33-70b"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_2_VISION_1212"] = "x-ai/grok-2-vision-1212"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_2_1212"] = "x-ai/grok-2-1212"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R7B_12_2024"] = "cohere/command-r7b-12-2024"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE"] = "google/gemini-2.0-flash-exp:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-70b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT"] = "meta-llama/llama-3.3-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_LITE_V1"] = "amazon/nova-lite-v1"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_MICRO_V1"] = "amazon/nova-micro-v1"; + E_OPENROUTER_MODEL["MODEL_AMAZON_NOVA_PRO_V1"] = "amazon/nova-pro-v1"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_PREVIEW_FREE"] = "qwen/qwq-32b-preview:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWQ_32B_PREVIEW"] = "qwen/qwq-32b-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE"] = "google/learnlm-1.5-pro-experimental:free"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_QWEN_2_5_72B"] = "eva-unit-01/eva-qwen-2.5-72b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_11_20"] = "openai/gpt-4o-2024-11-20"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2411"] = "mistralai/mistral-large-2411"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2407"] = "mistralai/mistral-large-2407"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_LARGE_2411"] = "mistralai/pixtral-large-2411"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_VISION_BETA"] = "x-ai/grok-vision-beta"; + E_OPENROUTER_MODEL["MODEL_INFERMATIC_MN_INFEROR_12B"] = "infermatic/mn-inferor-12b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT"] = "qwen/qwen-2.5-coder-32b-instruct"; + E_OPENROUTER_MODEL["MODEL_RAIFLE_SORCERERLM_8X22B"] = "raifle/sorcererlm-8x22b"; + E_OPENROUTER_MODEL["MODEL_EVA_UNIT_01_EVA_QWEN_2_5_32B"] = "eva-unit-01/eva-qwen-2.5-32b"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_UNSLOPNEMO_12B"] = "thedrummer/unslopnemo-12b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA"] = "anthropic/claude-3.5-haiku:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU"] = "anthropic/claude-3.5-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA"] = "anthropic/claude-3.5-haiku-20241022:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B"] = "neversleep/llama-3.1-lumimaid-70b"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B"] = "anthracite-org/magnum-v4-72b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA"] = "anthropic/claude-3.5-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET"] = "anthropic/claude-3.5-sonnet"; + E_OPENROUTER_MODEL["MODEL_X_AI_GROK_BETA"] = 
"x-ai/grok-beta"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_8B"] = "mistralai/ministral-8b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_3B"] = "mistralai/ministral-3b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_7B_INSTRUCT"] = "qwen/qwen-2.5-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE"] = "nvidia/llama-3.1-nemotron-70b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT"] = "nvidia/llama-3.1-nemotron-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PI"] = "inflection/inflection-3-pi"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B"] = "google/gemini-flash-1.5-8b"; + E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ROCINANTE_12B"] = "thedrummer/rocinante-12b"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; + E_OPENROUTER_MODEL["MODEL_LIQUID_LFM_40B"] = "liquid/lfm-40b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT"] = "meta-llama/llama-3.2-3b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-1b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT"] = "meta-llama/llama-3.2-1b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-90b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE"] = "meta-llama/llama-3.2-11b-vision-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-11b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT"] = "qwen/qwen-2.5-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_72B_INSTRUCT"] = "qwen/qwen-2.5-vl-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B"] = "neversleep/llama-3.1-lumimaid-8b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PREVIEW"] = "openai/o1-preview"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PREVIEW_2024_09_12"] = "openai/o1-preview-2024-09-12"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI"] = "openai/o1-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_12B"] = "mistralai/pixtral-12b"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_08_2024"] = "cohere/command-r-plus-08-2024"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE"] = "qwen/qwen-2.5-vl-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT"] = "qwen/qwen-2.5-vl-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_EURYALE_70B"] = "sao10k/l3.1-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B_EXP"] = "google/gemini-flash-1.5-8b-exp"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_5_MINI"] = "ai21/jamba-1-5-mini"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_1_5_LARGE"] = "ai21/jamba-1-5-large"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT"] = "microsoft/phi-3.5-mini-128k-instruct"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B"] = "nousresearch/hermes-3-llama-3.1-70b"; + 
E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B"] = "nousresearch/hermes-3-llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_CHATGPT_4O_LATEST"] = "openai/chatgpt-4o-latest"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_LUNARIS_8B"] = "sao10k/l3-lunaris-8b"; + E_OPENROUTER_MODEL["MODEL_AETHERWIING_MN_STARCANNON_12B"] = "aetherwiing/mn-starcannon-12b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_08_06"] = "openai/gpt-4o-2024-08-06"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B"] = "meta-llama/llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_NOTHINGIISREAL_MN_CELESTE_12B"] = "nothingiisreal/mn-celeste-12b"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE"] = "perplexity/llama-3.1-sonar-small-128k-online"; + E_OPENROUTER_MODEL["MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE"] = "perplexity/llama-3.1-sonar-large-128k-online"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-8b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT"] = "meta-llama/llama-3.1-405b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT"] = "meta-llama/llama-3.1-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_CODESTRAL_MAMBA"] = "mistralai/codestral-mamba"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO"] = "mistralai/mistral-nemo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI"] = "openai/gpt-4o-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_MINI_2024_07_18"] = "openai/gpt-4o-mini-2024-07-18"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_7B_INSTRUCT_FREE"] = "qwen/qwen-2-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_7B_INSTRUCT"] = "qwen/qwen-2-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_27B_IT"] = "google/gemma-2-27b-it"; + E_OPENROUTER_MODEL["MODEL_ALPINDALE_MAGNUM_72B"] = "alpindale/magnum-72b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_2_9B_IT"] = "google/gemma-2-9b-it"; + E_OPENROUTER_MODEL["MODEL_01_AI_YI_LARGE"] = "01-ai/yi-large"; + E_OPENROUTER_MODEL["MODEL_AI21_JAMBA_INSTRUCT"] = "ai21/jamba-instruct"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA"] = "anthropic/claude-3.5-sonnet-20240620:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620"] = "anthropic/claude-3.5-sonnet-20240620"; + E_OPENROUTER_MODEL["MODEL_SAO10K_L3_EURYALE_70B"] = "sao10k/l3-euryale-70b"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B"] = "cognitivecomputations/dolphin-mixtral-8x22b"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_72B_INSTRUCT"] = "qwen/qwen-2-72b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT"] = "mistralai/mistral-7b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = "mistralai/mistral-7b-instruct-v0.3"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B"] = "nousresearch/hermes-2-pro-llama-3-8b"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE"] = "microsoft/phi-3-mini-128k-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT"] = "microsoft/phi-3-mini-128k-instruct"; + 
E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE"] = "microsoft/phi-3-medium-128k-instruct:free"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT"] = "microsoft/phi-3-medium-128k-instruct"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B"] = "neversleep/llama-3-lumimaid-70b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5"] = "google/gemini-flash-1.5"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O"] = "openai/gpt-4o"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_EXTENDED"] = "openai/gpt-4o:extended"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_05_13"] = "openai/gpt-4o-2024-05-13"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_GUARD_2_8B"] = "meta-llama/llama-guard-2-8b"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B_EXTENDED"] = "neversleep/llama-3-lumimaid-8b:extended"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B"] = "neversleep/llama-3-lumimaid-8b"; + E_OPENROUTER_MODEL["MODEL_SAO10K_FIMBULVETR_11B_V2"] = "sao10k/fimbulvetr-11b-v2"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE"] = "meta-llama/llama-3-8b-instruct:free"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT"] = "meta-llama/llama-3-8b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT"] = "meta-llama/llama-3-70b-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT"] = "mistralai/mixtral-8x22b-instruct"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_WIZARDLM_2_8X22B"] = "microsoft/wizardlm-2-8x22b"; + E_OPENROUTER_MODEL["MODEL_MICROSOFT_WIZARDLM_2_7B"] = "microsoft/wizardlm-2-7b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO_1_5"] = "google/gemini-pro-1.5"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO"] = "openai/gpt-4-turbo"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS"] = "cohere/command-r-plus"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_04_2024"] = "cohere/command-r-plus-04-2024"; + E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B"] = "sophosympatheia/midnight-rose-70b"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND"] = "cohere/command"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA"] = "anthropic/claude-3-haiku:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU"] = "anthropic/claude-3-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA"] = "anthropic/claude-3-opus:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS"] = "anthropic/claude-3-opus"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA"] = "anthropic/claude-3-sonnet:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_SONNET"] = "anthropic/claude-3-sonnet"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_03_2024"] = "cohere/command-r-03-2024"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE"] = "mistralai/mistral-large"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_7B_IT"] = "google/gemma-7b-it"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0613"] = "openai/gpt-3.5-turbo-0613"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO_PREVIEW"] = "openai/gpt-4-turbo-preview"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO"] = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_MEDIUM"] = "mistralai/mistral-medium"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_TINY"] = "mistralai/mistral-tiny"; + 
E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2"] = "mistralai/mistral-7b-instruct-v0.2"; + E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X7B"] = "cognitivecomputations/dolphin-mixtral-8x7b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO_VISION"] = "google/gemini-pro-vision"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_PRO"] = "google/gemini-pro"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B"] = "mistralai/mixtral-8x7b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT"] = "mistralai/mixtral-8x7b-instruct"; + E_OPENROUTER_MODEL["MODEL_OPENCHAT_OPENCHAT_7B_FREE"] = "openchat/openchat-7b:free"; + E_OPENROUTER_MODEL["MODEL_OPENCHAT_OPENCHAT_7B"] = "openchat/openchat-7b"; + E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_NOROMAID_20B"] = "neversleep/noromaid-20b"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_1_BETA"] = "anthropic/claude-2.1:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_1"] = "anthropic/claude-2.1"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_BETA"] = "anthropic/claude-2:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2"] = "anthropic/claude-2"; + E_OPENROUTER_MODEL["MODEL_TEKNIUM_OPENHERMES_2_5_MISTRAL_7B"] = "teknium/openhermes-2.5-mistral-7b"; + E_OPENROUTER_MODEL["MODEL_UNDI95_TOPPY_M_7B_FREE"] = "undi95/toppy-m-7b:free"; + E_OPENROUTER_MODEL["MODEL_UNDI95_TOPPY_M_7B"] = "undi95/toppy-m-7b"; + E_OPENROUTER_MODEL["MODEL_ALPINDALE_GOLIATH_120B"] = "alpindale/goliath-120b"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_AUTO"] = "openrouter/auto"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_1106"] = "openai/gpt-3.5-turbo-1106"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_1106_PREVIEW"] = "openai/gpt-4-1106-preview"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CHAT_BISON_32K"] = "google/palm-2-chat-bison-32k"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CODECHAT_BISON_32K"] = "google/palm-2-codechat-bison-32k"; + E_OPENROUTER_MODEL["MODEL_JONDURBIN_AIROBOROS_L2_70B"] = "jondurbin/airoboros-l2-70b"; + E_OPENROUTER_MODEL["MODEL_XWIN_LM_XWIN_LM_70B"] = "xwin-lm/xwin-lm-70b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT"] = "openai/gpt-3.5-turbo-instruct"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; + E_OPENROUTER_MODEL["MODEL_PYGMALIONAI_MYTHALION_13B"] = "pygmalionai/mythalion-13b"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_16K"] = "openai/gpt-3.5-turbo-16k"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_32K"] = "openai/gpt-4-32k"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_32K_0314"] = "openai/gpt-4-32k-0314"; + E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_NOUS_HERMES_LLAMA2_13B"] = "nousresearch/nous-hermes-llama2-13b"; + E_OPENROUTER_MODEL["MODEL_MANCER_WEAVER"] = "mancer/weaver"; + E_OPENROUTER_MODEL["MODEL_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE"] = "huggingfaceh4/zephyr-7b-beta:free"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_0_BETA"] = "anthropic/claude-2.0:beta"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_2_0"] = "anthropic/claude-2.0"; + E_OPENROUTER_MODEL["MODEL_UNDI95_REMM_SLERP_L2_13B"] = "undi95/remm-slerp-l2-13b"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CHAT_BISON"] = "google/palm-2-chat-bison"; + E_OPENROUTER_MODEL["MODEL_GOOGLE_PALM_2_CODECHAT_BISON"] = "google/palm-2-codechat-bison"; + E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B_FREE"] = "gryphe/mythomax-l2-13b:free"; + E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B"] = "gryphe/mythomax-l2-13b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_2_13B_CHAT"] = "meta-llama/llama-2-13b-chat"; + 
E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_2_70B_CHAT"] = "meta-llama/llama-2-70b-chat"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO"] = "openai/gpt-3.5-turbo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0125"] = "openai/gpt-3.5-turbo-0125"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4"] = "openai/gpt-4"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; +})(E_OPENROUTER_MODEL || (E_OPENROUTER_MODEL = {})); +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvbW9kZWxzL2NhY2hlL29wZW5yb3V0ZXItbW9kZWxzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE1BQU0sQ0FBTixJQUFZLGtCQW1TWDtBQW5TRCxXQUFZLGtCQUFrQjtJQUM1Qix5RUFBbUQsQ0FBQTtJQUNuRCw2RkFBdUUsQ0FBQTtJQUN2RSx5R0FBbUYsQ0FBQTtJQUNuRiwyR0FBcUYsQ0FBQTtJQUNyRiwrRUFBeUQsQ0FBQTtJQUN6RCx1R0FBaUYsQ0FBQTtJQUNqRixpR0FBMkUsQ0FBQTtJQUMzRSx5R0FBbUYsQ0FBQTtJQUNuRixtR0FBNkUsQ0FBQTtJQUM3RSx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSx1RkFBaUUsQ0FBQTtJQUNqRSwyREFBcUMsQ0FBQTtJQUNyQywySEFBcUcsQ0FBQTtJQUNyRyxpSEFBMkYsQ0FBQTtJQUMzRix5RkFBbUUsQ0FBQTtJQUNuRSwyRkFBcUUsQ0FBQTtJQUNyRSw2RkFBdUUsQ0FBQTtJQUN2RSxpR0FBMkUsQ0FBQTtJQUMzRSxtRkFBNkQsQ0FBQTtJQUM3RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCx5RUFBbUQsQ0FBQTtJQUNuRCx1RUFBaUQsQ0FBQTtJQUNqRCxxRkFBK0QsQ0FBQTtJQUMvRCwyRUFBcUQsQ0FBQTtJQUNyRCxpRUFBMkMsQ0FBQTtJQUMzQyxtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSxtSUFBNkcsQ0FBQTtJQUM3RyxpRkFBMkQsQ0FBQTtJQUMzRCxxRkFBK0QsQ0FBQTtJQUMvRCwyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxxSEFBK0YsQ0FBQTtJQUMvRixtRkFBNkQsQ0FBQTtJQUM3RCx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSx5RUFBbUQsQ0FBQTtJQUNuRCw2RkFBdUUsQ0FBQTtJQUN2RSw2RkFBdUUsQ0FBQTtJQUN2RSxtRUFBNkMsQ0FBQTtJQUM3Qyx5REFBbUMsQ0FBQTtJQUNuQyxtRkFBNkQsQ0FBQTtJQUM3RCxxSEFBK0YsQ0FBQTtJQUMvRixtSUFBNkcsQ0FBQTtJQUM3Ryw2RUFBdUQsQ0FBQTtJQUN2RCxpR0FBMkUsQ0FBQTtJQUMzRSxpR0FBMkUsQ0FBQTtJQUMzRSx1RkFBaUUsQ0FBQTtJQUNqRSx5R0FBbUYsQ0FBQTtJQUNuRixxRUFBK0MsQ0FBQTtJQUMvQyw2RUFBdUQsQ0FBQTtJQUN2RCx5SUFBbUgsQ0FBQTtJQUNuSCxtSUFBNkcsQ0FBQTtJQUM3Ryx1RkFBaUUsQ0FBQTtJQUNqRSx1RUFBaUQsQ0FBQTtJQUNqRCwyR0FBcUYsQ0FBQTtJQUNyRix1RkFBaUUsQ0FBQTtJQUNqRSx5R0FBbUYsQ0FBQTtJQUNuRixtRUFBNkMsQ0FBQTtJQUM3QyxxRUFBK0MsQ0FBQTtJQUMvQywrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSxpRUFBMkMsQ0FBQTtJQUMzQywrREFBeUMsQ0FBQTtJQUN6QyxtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QywyREFBcUMsQ0FBQTtJQUNyQyw2REFBdUMsQ0FBQTtJQUN2Qyw2R0FBdUYsQ0FBQTtJQUN2Riw2SEFBdUcsQ0FBQTtJQUN2RyxtSEFBNkYsQ0FBQTtJQUM3RixxSEFBK0YsQ0FBQTtJQUMvRiwyR0FBcUYsQ0FBQTtJQUNyRixxSEFBK0YsQ0FBQTtJQUMvRiwyR0FBcUYsQ0FBQTtJQUNyRixxRkFBK0QsQ0FBQTtJQUMvRCxpRUFBMkMsQ0FBQTtJQUMzQywyREFBcUMsQ0FBQTtJQUNyQywyREFBcUMsQ0FBQTtJQUNyQyx1SEFBaUcsQ0FBQTtJQUNqRyw2R0FBdUYsQ0FBQTtJQUN2RixtSEFBNkYsQ0FBQTtJQUM3RixtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxtSEFBNkYsQ0FBQTtJQUM3RixxRUFBK0MsQ0FBQTtJQUMvQyxpRkFBMkQsQ0FBQTtJQUMzRCwrREFBeUMsQ0FBQTtJQUN6QyxtRkFBNkQsQ0FBQTtJQUM3RCx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCw2SEFBdUcsQ0FBQTtJQUN2RywrRUFBeUQsQ0FBQTtJQUN6RCxtREFBNkIsQ0FBQTtJQUM3Qiw2RkFBdUUsQ0FBQTtJQUN2RSwrRUFBeUQsQ0FBQTtJQUN6RCxpRUFBMkMsQ0FBQTtJQUMzQyxxRkFBK0QsQ0FBQTtJQUMvRCxpR0FBMkUsQ0FBQTtJQUMzRSw2R0FBdUYsQ0FBQTtJQUN2RixtR0FBNkUsQ0FBQTtJQUM3RSx1RUFBaUQsQ0FBQTtJQUNqRCx5RUFBbUQsQ0FBQTtJQUNuRCxxRUFBK0MsQ0FBQTtJQUMvQyxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxpSEFBMkYsQ0FBQTtJQUMzRix5RkFBbUUsQ0FBQTtJQUNuRSxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSx5RkFBbUUsQ0FBQTtJQUNuRSx5RkFBbUUsQ0FBQTtJQUNuRSwyRUFBcUQsQ0FBQTtJQUNyRCxtRkFBNkQsQ0FBQTtJQUM3R
CwyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSwrRUFBeUQsQ0FBQTtJQUN6RCx5RkFBbUUsQ0FBQTtJQUNuRSxtRkFBNkQsQ0FBQTtJQUM3RCwrRkFBeUUsQ0FBQTtJQUN6RSxxRkFBK0QsQ0FBQTtJQUMvRCxpSEFBMkYsQ0FBQTtJQUMzRix1R0FBaUYsQ0FBQTtJQUNqRixtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSxpR0FBMkUsQ0FBQTtJQUMzRSx1RkFBaUUsQ0FBQTtJQUNqRSw2REFBdUMsQ0FBQTtJQUN2Qyw2RUFBdUQsQ0FBQTtJQUN2RCw2RUFBdUQsQ0FBQTtJQUN2RCxtRkFBNkQsQ0FBQTtJQUM3RCx1SEFBaUcsQ0FBQTtJQUNqRyw2R0FBdUYsQ0FBQTtJQUN2Rix5R0FBbUYsQ0FBQTtJQUNuRixxRkFBK0QsQ0FBQTtJQUMvRCxxRkFBK0QsQ0FBQTtJQUMvRCxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QywyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSwyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSxpSEFBMkYsQ0FBQTtJQUMzRiwySEFBcUcsQ0FBQTtJQUNyRyxpSEFBMkYsQ0FBQTtJQUMzRiwrRkFBeUUsQ0FBQTtJQUN6RSxxRkFBK0QsQ0FBQTtJQUMvRCwyRkFBcUUsQ0FBQTtJQUNyRSxpR0FBMkUsQ0FBQTtJQUMzRSxtRUFBNkMsQ0FBQTtJQUM3Qyx5RkFBbUUsQ0FBQTtJQUNuRSw2REFBdUMsQ0FBQTtJQUN2QyxtRkFBNkQsQ0FBQTtJQUM3RCwyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCxtR0FBNkUsQ0FBQTtJQUM3RSx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSx1RUFBaUQsQ0FBQTtJQUNqRCx5RUFBbUQsQ0FBQTtJQUNuRCx5R0FBbUYsQ0FBQTtJQUNuRix1R0FBaUYsQ0FBQTtJQUNqRix5R0FBbUYsQ0FBQTtJQUNuRixpRkFBMkQsQ0FBQTtJQUMzRCx5RUFBbUQsQ0FBQTtJQUNuRCwyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCxtRkFBNkQsQ0FBQTtJQUM3RCwyRkFBcUUsQ0FBQTtJQUNyRSx5SEFBbUcsQ0FBQTtJQUNuRyx5SEFBbUcsQ0FBQTtJQUNuRywyR0FBcUYsQ0FBQTtJQUNyRixpR0FBMkUsQ0FBQTtJQUMzRSxxR0FBK0UsQ0FBQTtJQUMvRSxtR0FBNkUsQ0FBQTtJQUM3RSxtRkFBNkQsQ0FBQTtJQUM3RCx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCxxRUFBK0MsQ0FBQTtJQUMvQywyRkFBcUUsQ0FBQTtJQUNyRSx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCwyRUFBcUQsQ0FBQTtJQUNyRCx5RUFBbUQsQ0FBQTtJQUNuRCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCw2REFBdUMsQ0FBQTtJQUN2Qyx1RUFBaUQsQ0FBQTtJQUNqRCxtSEFBNkYsQ0FBQTtJQUM3Rix5R0FBbUYsQ0FBQTtJQUNuRiwyRUFBcUQsQ0FBQTtJQUNyRCx1SEFBaUcsQ0FBQTtJQUNqRyxpRkFBMkQsQ0FBQTtJQUMzRCxxR0FBK0UsQ0FBQTtJQUMvRSwyRkFBcUUsQ0FBQTtJQUNyRSxxR0FBK0UsQ0FBQTtJQUMvRSx5R0FBbUYsQ0FBQTtJQUNuRiwrR0FBeUYsQ0FBQTtJQUN6RixxR0FBK0UsQ0FBQTtJQUMvRSxtSEFBNkYsQ0FBQTtJQUM3Rix5R0FBbUYsQ0FBQTtJQUNuRiwrRkFBeUUsQ0FBQTtJQUN6RSwrRUFBeUQsQ0FBQTtJQUN6RCwyREFBcUMsQ0FBQTtJQUNyQyw2RUFBdUQsQ0FBQTtJQUN2RCxpRkFBMkQsQ0FBQTtJQUMzRCx1RkFBaUUsQ0FBQTtJQUNqRSwrR0FBeUYsQ0FBQTtJQUN6Riw2RkFBdUUsQ0FBQTtJQUN2RSxpRkFBMkQsQ0FBQTtJQUMzRCx1R0FBaUYsQ0FBQTtJQUNqRiw2RkFBdUUsQ0FBQTtJQUN2RSwrRkFBeUUsQ0FBQTtJQUN6RSxpR0FBMkUsQ0FBQTtJQUMzRSxxRkFBK0QsQ0FBQTtJQUMvRCwrRUFBeUQsQ0FBQTtJQUN6RCwyRUFBcUQsQ0FBQTtJQUNyRCxxRUFBK0MsQ0FBQTtJQUMvQywyRUFBcUQsQ0FBQTtJQUNyRCwyRkFBcUUsQ0FBQTtJQUNyRSxtR0FBNkUsQ0FBQTtJQUM3RSw2REFBdUMsQ0FBQTtJQUN2QyxpRUFBMkMsQ0FBQTtJQUMzQywyRkFBcUUsQ0FBQTtJQUNyRSxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSwrRUFBeUQsQ0FBQTtJQUN6RCw2RkFBdUUsQ0FBQTtJQUN2RSxtRkFBNkQsQ0FBQTtJQUM3RCxpRkFBMkQsQ0FBQTtJQUMzRCwrRUFBeUQsQ0FBQTtJQUN6RCxxRUFBK0MsQ0FBQTtJQUMvQyxtRkFBNkQsQ0FBQTtJQUM3RCxxRkFBK0QsQ0FBQTtJQUMvRCx1SEFBaUcsQ0FBQTtJQUNqRyxpRkFBMkQsQ0FBQTtJQUMzRCwrRUFBeUQsQ0FBQTtJQUN6RCw2RUFBdUQsQ0FBQTtJQUN2RCxxR0FBK0UsQ0FBQTtJQUMvRSxxSEFBK0YsQ0FBQTtJQUMvRixpRkFBMkQsQ0FBQTtJQUMzRCxtRUFBNkMsQ0FBQTtJQUM3Qyw2RUFBdUQsQ0FBQTtJQUN2RCwrRkFBeUUsQ0FBQTtJQUN6RSxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCwrRUFBeUQsQ0FBQTtJQUN6RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCwrRUFBeUQsQ0FBQTtJQUN6RCxxRUFBK0MsQ0FBQTtJQUMvQyxtR0FBNkUsQ0FBQTtJQUM3RSw2RUFBdUQsQ0FBQTtJQUN2RCxtRUFBNkMsQ0FBQTtJQUM3Qyw2RUFBdUQsQ0FBQTtJQUN2RCwrREFBeUMsQ0FBQTtJQUN6QyxtRkFBNkQsQ0FBQTtJQUM3RCxtRkFBNkQsQ0FBQTtJQUM3RCx5RkFBbUUsQ0FBQTtJQUNuRSxp
R0FBMkUsQ0FBQTtJQUMzRSxxRkFBK0QsQ0FBQTtJQUMvRCx1RUFBaUQsQ0FBQTtJQUNqRCwyRkFBcUUsQ0FBQTtJQUNyRSxxR0FBK0UsQ0FBQTtJQUMvRSxtRkFBNkQsQ0FBQTtJQUM3RCxpRkFBMkQsQ0FBQTtJQUMzRCxpRUFBMkMsQ0FBQTtJQUMzQywyRUFBcUQsQ0FBQTtJQUNyRCx1R0FBaUYsQ0FBQTtJQUNqRiwyREFBcUMsQ0FBQTtJQUNyQyxtR0FBNkUsQ0FBQTtJQUM3RSxtRkFBNkQsQ0FBQTtJQUM3RCx5RUFBbUQsQ0FBQTtJQUNuRCxpRkFBMkQsQ0FBQTtJQUMzRCxpRkFBMkQsQ0FBQTtJQUMzRCx5RkFBbUUsQ0FBQTtJQUNuRSx1RkFBaUUsQ0FBQTtJQUNqRSw2RUFBdUQsQ0FBQTtJQUN2RCx1RkFBaUUsQ0FBQTtJQUNqRSx1RkFBaUUsQ0FBQTtJQUNqRSx5RUFBbUQsQ0FBQTtJQUNuRCxtRkFBNkQsQ0FBQTtJQUM3RCx5REFBbUMsQ0FBQTtJQUNuQyxtRUFBNkMsQ0FBQTtBQUMvQyxDQUFDLEVBblNXLGtCQUFrQixLQUFsQixrQkFBa0IsUUFtUzdCIn0= +;// ./dist-in/models/cache/openrouter-models-free.js +var E_OPENROUTER_MODEL_FREE; +(function (E_OPENROUTER_MODEL_FREE) { + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_V3_BASE_FREE"] = "deepseek/deepseek-v3-base:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_ALLENAI_MOLMO_7B_D_FREE"] = "allenai/molmo-7b-d:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE"] = "bytedance-research/ui-tars-72b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE"] = "google/gemini-2.5-pro-exp-03-25:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE"] = "featherless/qwerky-72b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPEN_R1_OLYMPICCODER_7B_FREE"] = "open-r1/olympiccoder-7b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPEN_R1_OLYMPICCODER_32B_FREE"] = "open-r1/olympiccoder-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_1B_IT_FREE"] = "google/gemma-3-1b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_REKAAI_REKA_FLASH_3_FREE"] = "rekaai/reka-flash-3:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE"] = "google/gemma-3-27b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE"] = "deepseek/deepseek-r1-zero:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWQ_32B_FREE"] = "qwen/qwq-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE"] = "moonshotai/moonlight-16b-a3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE"] = "nousresearch/deephermes-3-llama-3-8b-preview:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-mistral-24b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE"] = "google/gemini-2.0-pro-exp-02-05:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-72b-instruct:free"; + 
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE"] = "mistralai/mistral-small-24b-instruct-2501:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE"] = "deepseek/deepseek-r1-distill-qwen-32b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE"] = "deepseek/deepseek-r1-distill-qwen-14b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE"] = "deepseek/deepseek-r1-distill-llama-70b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE"] = "google/gemini-2.0-flash-thinking-exp:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE"] = "deepseek/deepseek-r1:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE"] = "sophosympatheia/rogue-rose-103b-v0.2:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_FREE"] = "deepseek/deepseek-chat:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE"] = "google/gemini-2.0-flash-thinking-exp-1219:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE"] = "google/gemini-2.0-flash-exp:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-70b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWQ_32B_PREVIEW_FREE"] = "qwen/qwq-32b-preview:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE"] = "google/learnlm-1.5-pro-experimental:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE"] = "nvidia/llama-3.1-nemotron-70b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-1b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE"] = "meta-llama/llama-3.2-11b-vision-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE"] = "qwen/qwen-2.5-vl-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_FLASH_1_5_8B_EXP"] = "google/gemini-flash-1.5-8b-exp"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-8b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_7B_INSTRUCT_FREE"] = "qwen/qwen-2-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE"] = "microsoft/phi-3-mini-128k-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE"] = "microsoft/phi-3-medium-128k-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE"] = "meta-llama/llama-3-8b-instruct:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENCHAT_OPENCHAT_7B_FREE"] = "openchat/openchat-7b:free"; + 
E_OPENROUTER_MODEL_FREE["MODEL_FREE_UNDI95_TOPPY_M_7B_FREE"] = "undi95/toppy-m-7b:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE"] = "huggingfaceh4/zephyr-7b-beta:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_GRYPHE_MYTHOMAX_L2_13B_FREE"] = "gryphe/mythomax-l2-13b:free"; +})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {})); +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkEwRFg7QUExREQsV0FBWSx1QkFBdUI7SUFDakMsdUdBQTRFLENBQUE7SUFDNUUseUZBQThELENBQUE7SUFDOUQsaUhBQXNGLENBQUE7SUFDdEYsMkdBQWdGLENBQUE7SUFDaEYsbUhBQXdGLENBQUE7SUFDeEYsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYsaUdBQXNFLENBQUE7SUFDdEUscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUscUdBQTBFLENBQUE7SUFDMUUsNkZBQWtFLENBQUE7SUFDbEUsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsdUdBQTRFLENBQUE7SUFDNUUsNkVBQWtELENBQUE7SUFDbEQsK0hBQW9HLENBQUE7SUFDcEcsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsbUhBQXdGLENBQUE7SUFDeEYsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkhBQWtHLENBQUE7SUFDbEcsNkZBQWtFLENBQUE7SUFDbEUsNkhBQWtHLENBQUE7SUFDbEcsaUdBQXNFLENBQUE7SUFDdEUsdUlBQTRHLENBQUE7SUFDNUcsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYsNkZBQWtFLENBQUE7SUFDbEUsMkhBQWdHLENBQUE7SUFDaEcscUhBQTBGLENBQUE7SUFDMUYsaUlBQXNHLENBQUE7SUFDdEcscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYscUlBQTBHLENBQUE7SUFDMUcseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsdUdBQTRFLENBQUE7SUFDNUUscUhBQTBGLENBQUE7SUFDMUYsaUdBQXNFLENBQUE7SUFDdEUsbUdBQXdFLENBQUE7SUFDeEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7SUFDcEYseUhBQThGLENBQUE7SUFDOUYsNkhBQWtHLENBQUE7SUFDbEcsaUhBQXNGLENBQUE7SUFDdEYsNkZBQWtFLENBQUE7SUFDbEUsdUZBQTRELENBQUE7SUFDNUQsNkdBQWtGLENBQUE7SUFDbEYsaUdBQXNFLENBQUE7QUFDeEUsQ0FBQyxFQTFEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBMERsQyJ9 ;// ./dist-in/index.js @@ -212083,7 +214048,10 @@ const assistant_supported = { -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUN2QyxPQUFPLElBQUksTUFBTSxXQUFXLENBQUE7QUFFNUIsTUFBTSxTQUFTLEdBQUcsUUFBUSxLQUFLLE9BQU8sQ0FBQTtBQUV0QyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sZUFBZSxDQUFBO0FBQzVDLE9BQU8sRUFBRSxPQUFPLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sZ0JBQWdCLENBQUE7QUFFNUMsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFRLFlBQVksQ0FBQyxXQUFXLENBQUMsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsR0FBRyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDdkMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxDQUFDLENBQUMsQ0FBQyxVQUFVLENBQUMsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxFQUFFLElBQUksV0FBVyxFQUFFLENBQUMsQ0FBQyxDQUFBO0FBRXJILE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUEyQjtJQUN6RCxJQUFJLEVBQUUsVUFBVTtJQUNoQixNQUFNLEVBQUUsWUFBWTtJQUNwQixLQUFLLEVBQUUsZUFBZTtJQUN0QixNQUFNLEVBQUUsVUFBVTtJQUNsQixNQUFNLEVBQUUsb0JBQW9CO0lBQzVCLE9BQU8sRUFBRSx5RUFBeUU7SUFDbEYsS0FBSyxFQUFFLGVBQWU7SUFDdEIsT0FBTyxFQUFFLFdBQVc7SUFDcEIsT0FBTyxFQUFFLGFBQWE7SUFDdEIsS0FBSyxFQUFFLGlCQUFpQjtJQUN4QixPQUFPLEVBQUUsa0JBQWtCO0lBQzNCLEtBQUssRUFBRSxlQUFlO0lBQ3RCLE1BQU0sRUFBRSxpQkFBaUI7SUFDekIsTUFBTSxFQUFFLFlBQVk7SUFDcEIsT0FBTyxFQUFFLDJFQUEyRTtJQUNwRixLQUFLLEVBQUUsZUFBZTtJQUN0QixLQUFLLEVBQUUsYUFBYTtJQUNwQixLQUFLLEVBQUUsa0JBQWtCO0lBQ3pCLE1BQU0sRUFBRSxZQUFZO0lBQ3BCLEtBQUssRUFBRSx3QkFBd0I7SUFDL0IsTUFBTSxFQUFFLFlBQVk7Q0FDckIsQ0FBQTtBQUNELGNBQWMsWUFBWSxDQUFBO0FBQzFCLGNBQWMsZ0JBQWdCLENBQUE7QUFDOUIsY0FBYyxpQkFBaUIsQ0FBQSJ9 + + + +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUN2QyxPQUFPLElBQUksTUFBTSxXQUFXLENBQUE7QUFFNUIsTUFBTSxTQUFTLEdBQUcsUUFBUSxLQUFLLE9BQU8sQ0FBQTtBQUV0QyxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sZUFBZSxDQUFBO0FBQzVDLE9BQU8sRUFBRSxPQUFPLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sZ0JBQWdCLENBQUE7QUFFNUMsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFRLFlBQVksQ0FBQyxXQUFXLENBQUMsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsR0FBRyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDdkMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLEdBQUcsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxDQUFDLENBQUMsQ0FBQyxVQUFVLENBQUMsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxFQUFFLElBQUksV0FBVyxFQUFFLENBQUMsQ0FBQyxDQUFBO0FBRXJILE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUEyQjtJQUN6RCxJQUFJLEVBQUUsVUFBVTtJQUNoQixNQUFNLEVBQUUsWUFBWTtJQUNwQixLQUFLLEVBQUUsZUFBZTtJQUN0QixNQUFNLEVBQUUsVUFBVTtJQUNsQixNQUFNLEVBQUUsb0JBQW9CO0lBQzVCLE9BQU8sRUFBRSx5RUFBeUU7SUFDbEYsS0FBSyxFQUFFLGVBQWU7SUFDdEIsT0FBTyxFQUFFLFdBQVc7SUFDcEIsT0FBTyxFQUFFLGFBQWE7SUFDdEIsS0FBSyxFQUFFLGlCQUFpQjtJQUN4QixPQUFPLEVBQUUsa0JBQWtCO0lBQzNCLEtBQUssRUFBRSxlQUFlO0lBQ3RCLE1BQU0sRUFBRSxpQkFBaUI7SUFDekIsTUFBTSxFQUFFLFlBQVk7SUFDcEIsT0FBTyxFQUFFLDJFQUEyRTtJQUNwRixLQUFLLEVBQUUsZUFBZTtJQUN0QixLQUFLLEVBQUUsYUFBYTtJQUNwQixLQUFLLEVBQUUsa0JBQWtCO0lBQ3pCLE1BQU0sRUFBRSxZQUFZO0lBQ3BCLEtBQUssRUFBRSx3QkFBd0I7SUFDL0IsTUFBTSxFQUFFLFlBQVk7Q0FDckIsQ0FBQTtBQUNELGNBQWMsWUFBWSxDQUFBO0FBQzFCLGNBQWMsZ0JBQWdCLENBQUE7QUFDOUIsY0FBYyxpQkFBaUIsQ0FBQTtBQUUvQixPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0saUNBQWlDLENBQUE7QUFDaEUsT0FBTyxFQUFFLGtCQUFrQixFQUFFLE1BQU0scUNBQXFDLENBQUE7QUFDeEUsT0FBTyxFQUFFLHVCQUF1QixFQUFFLE1BQU0sMENBQTBDLENBQUEifQ== ;// ./dist-in/filters.js @@ -212224,6 +214192,7 @@ const applyFilters = (value, filters) => { + const 
zod_schema_get_var = (key = '') => env_var_env_var.get(key).asString() || env_var_env_var.get(key.replace(/-/g, '_')).asString() || env_var_env_var.get(key.replace(/_/g, '-')).asString(); const zod_schema_HOME = (sub = '') => external_node_path_namespaceObject.join(process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME'] || '', sub); const PREFERENCES_DEFAULT = (key = 'KBOT_PREFERENCES') => zod_schema_get_var(key) || external_node_path_namespaceObject.join(zod_schema_HOME(`.${constants_API_PREFIX}`), PREFERENCES_FILE_NAME); @@ -212371,7 +214340,12 @@ const OptionsSchema = (opts) => { ]) .optional() .default(false) - .describe('Dry run - only write out parameters without making API calls')); + .describe('Dry run - only write out parameters without making API calls')) + .add('format', zod_lib_z.any() + .optional() + .default(null) + .describe('Zod schema for structured outputs') + .transform((val) => val ? zodResponseFormat(val, "format") : null)); return schemaMap.root() .passthrough() .describe('IKBotOptions'); @@ -212385,7 +214359,7 @@ const schemas = () => { write([OptionsSchema()], 'schema.json', 'kbot', {}); write_sync('schema_ui.json', schemaMap.getUISchema()); }; -//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"zod_schema.js","sourceRoot":"","sources":["../src/zod_schema.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AACvB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAA;AACjC,OAAO,KAAK,MAAM,OAAO,CAAA;AACzB,OAAO,GAAG,MAAM,SAAS,CAAA;AACzB,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,mBAAmB,CAAA;AAEnF,OAAO,EAAE,IAAI,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAA;AACpD,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AAEzE,OAAO,EAAE,UAAU,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAA;AAErF,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,MAAc,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;AAChK,MAAM,CAAC,MAAM,IAAI,GAAG,CAAC,GAAG,GAAG,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,GAAG,CAAC,CAAA;AAC3H,MAAM,CAAC,MAAM,mBAAmB,GAAG,CAAC,MAAc,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,qBAAqB,CAAC,CAAA;AAEjJ,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAA;AAE5C,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAQ,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,YAAY,EAAE,QAAQ,EAAE,UAAU,EAAE,aAAa,EAAE,QAAQ,EAAE,WAAW,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAA;AAGnI,MAAM,CAAC,MAAM,KAAK,GAAG;IACnB,UAAU,EAAE,YAAY;IACxB,KAAK,EAAE,OAAO;IACd,SAAS,EAAE,WAAW;IACtB,MAAM,EAAE,QAAQ;CACR,CAAA;AAGV,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC;IAC1B,KAAK,CAAC,UAAU;IAChB,KAAK,CAAC,KAAK;IACX,KAAK,CAAC,SAAS;IACf,KAAK,CAAC,MAAM;CACb,CAAC,CAAA;AACF,oFAAoF;AACpF;;;;;;EAME;AACF,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC;IAC9B,MAAM;IACN,iCAAiC;IACjC,6BAA6B;IAC7B,gCAAgC;IAChC,0CAA0C;IAC1C,6BAA6B;CAC9B,CAAC,CAAC,QAAQ,CAAC,8BAA8B,CAAC,CAAA;AAK3C,OAAO,EAAE,qBAAqB,EAAE,mBAAmB,IAAI,6BAA6B,EAAE,MAAM,wBAAwB,CAAA;AACpH,OAAO,EAAE,iBAAiB,EAAE,mBAAmB,IAAI,yBAAyB,EAAE,MAAM,oBAAoB,CAAA;AAExG,IAAI,SAAS,CAAA;AAEb,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,IAAU,EAAO,EAAE;IAE/C,SAAS,GAAG,UAAU,CAAC,MAAM,EAAqB,CAAA;IAClD,SAAS,CAAC,GAAG,CACX,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SA
CP,GAAG,CAAC,CAAC,CAAC;SACN,OAAO,CAAC,GAAG,CAAC;SACZ,QAAQ,CAAC,kBAAkB,CAAC,EAC7B,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC;SACzB,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,CAAC,4DAA4D,CAAC;SACtE,QAAQ,EAAE;SACV,OAAO,CAAC,aAAa,CAAC,CAC1B;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,0DAA0D,CAAC,CACxE;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,yIAAyI,CAAC,CACvJ;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,6OAA6O,CAAC,CAC3P;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC,2CAA2C,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAC1F;SACA,GAAG,CACF,cAAc,EACd,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC,mCAAmC,CAAC,CACjD;SACA,GAAG,CACF,OAAO,EACP,CAAC,CAAC,KAAK,CACL;QACE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;QACnB,CAAC,CAAC,MAAM,EAAE;KACX,CAAC,CAAC,QAAQ,EAAE;SACZ,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC;SAC9B,QAAQ,CAAC,4FAA4F,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;SACvI,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACjE;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,QAAQ,EAAE;SACV,QAAQ,CAAC,gGAAgG,CAAC,CAC9G;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,yBAAyB,CAAC,CACvC;SACA,GAAG,CACF,OAAO,EACP,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,sDAAsD,WAAW,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAC9F;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,YAAY,CAAC;SACrB,QAAQ,CAAC,+CAA+C,CAAC,CAC7D;SACA,GAAG,CACF,MAAM,EACN,KAAK;SACF,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC;SACpB,QAAQ,CAAC;YACN,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC;YAC9B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;YACzB,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC;YAC7B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC;OAC/B,CAAC,CACH;SACA,GAAG,CACF,UAAU,EACV,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,CAAC,CAAC;SACV,QAAQ,CAAC,mCAAmC,CAAC,CACjD;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,gEAAgE,CAAC,CAC9E;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,oDAAoD,CAAC,CAClE;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,6EAA6E,CAAC,CAC3F;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,iBAAiB,CAAC,CAC/B;SACA,GAAG,CACF,aAAa,EACb,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,mBAAmB,EAAE,CAAC;SAC9B,QAAQ,CAAC,0GAA0G,CAAC,CACxH;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,iBAAiB,CAAC;SAC1B,QAAQ,CAAC,mBAAmB,CAAC,CACjC;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,SAAS,CAAC;SAClB,QAAQ,CAAC,0BAA0B,CAAC,CACxC;QACD,wCAAwC;QACxC,oFAAoF;SACnF,GAAG,CACF,WAAW,EACX,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC;SAC7B,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC,CACf;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC;QACN,CAAC,CAAC,MAAM,EAAE;QACV,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;QAClB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;QACnB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC;KACtB,CAAC;SACC,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC;;;uBAGK,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC;SACvC,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE;QACjB,IAAG,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAC,CAAC;YACnD,OAAO,GAAG,CAAA;QACZ,CAAC;QACD,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAA;QAClD,OAAO,GAAI,OAAO,CAAC,GAAG,CAAC
,CAAC,CAAM,EAAE,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;QAC9D,OAAO,OAAO,CAAA;IAChB,CAAC,CAAC,CACL;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,KAAK,CAAC;QACN,CAAC,CAAC,OAAO,EAAE;QACX,CAAC,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC;KAC5D,CAAC;SACC,QAAQ,EAAE;SACV,OAAO,CAAC,KAAK,CAAC;SACd,QAAQ,CAAC,8DAA8D,CAAC,CAC5E,CAAC;IACJ,OAAO,SAAS,CAAC,IAAI,EAAE;SACpB,WAAW,EAAE;SACb,QAAQ,CAAC,cAAc,CAAC,CAAA;AAC7B,CAAC,CAAA;AAGD,MAAM,CAAC,MAAM,KAAK,GAAG,GAAG,EAAE;IACxB,mBAAmB,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,kBAAkB,CAAC,CAAA;IAC1D,mBAAmB,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,+BAA+B,CAAC,CAAC,CAAC,CAAA;IAC9F,OAAO,EAAE,CAAA;AACX,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,OAAO,GAAG,GAAG,EAAE;IAC1B,KAAK,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,aAAa,EAAE,MAAM,EAAE,EAAE,CAAC,CAAA;IACnD,OAAO,CAAC,gBAAgB,EAAE,SAAS,CAAC,WAAW,EAAE,CAAC,CAAA;AACpD,CAAC,CAAA"} +//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"zod_schema.js","sourceRoot":"","sources":["../src/zod_schema.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AACvB,OAAO,KAAK,IAAI,MAAM,WAAW,CAAA;AACjC,OAAO,KAAK,MAAM,OAAO,CAAA;AACzB,OAAO,GAAG,MAAM,SAAS,CAAA;AACzB,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,mBAAmB,CAAA;AAEnF,OAAO,EAAE,IAAI,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAA;AACpD,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AACzE,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAA;AAEtD,OAAO,EAAE,UAAU,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAA;AAErF,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,MAAc,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;AAChK,MAAM,CAAC,MAAM,IAAI,GAAG,CAAC,GAAG,GAAG,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,GAAG,CAAC,CAAA;AAC3H,MAAM,CAAC,MAAM,mBAAmB,GAAG,CAAC,MAAc,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,qBAAqB,CAAC,CAAA;AAEjJ,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAA;AAE5C,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAQ,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,YAAY,EAAE,QAAQ,EAAE,UAAU,EAAE,aAAa,EAAE,QAAQ,EAAE,WAAW,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAA;AAGnI,MAAM,CAAC,MAAM,KAAK,GAAG;IACnB,UAAU,EAAE,YAAY;IACxB,KAAK,EAAE,OAAO;IACd,SAAS,EAAE,WAAW;IACtB,MAAM,EAAE,QAAQ;CACR,CAAA;AAGV,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC;IAC1B,KAAK,CAAC,UAAU;IAChB,KAAK,CAAC,KAAK;IACX,KAAK,CAAC,SAAS;IACf,KAAK,CAAC,MAAM;CACb,CAAC,CAAA;AACF,oFAAoF;AACpF;;;;;;EAME;AACF,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC;IAC9B,MAAM;IACN,iCAAiC;IACjC,6BAA6B;IAC7B,gCAAgC;IAChC,0CAA0C;IAC1C,6BAA6B;CAC9B,CAAC,CAAC,QAAQ,CAAC,8BAA8B,CAAC,CAAA;AAK3C,OAAO,EAAE,qBAAqB,EAAE,mBAAmB,IAAI,6BAA6B,EAAE,MAAM,wBAAwB,CAAA;AACpH,OAAO,EAAE,iBAAiB,EAAE,mBAAmB,IAAI,yBAAyB,EAAE,MAAM,oBAAoB,CAAA;AAExG,IAAI,SAAS,CAAA;AAEb,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,IAAU,EAAO,EAAE;IAE/C,SAAS,GAAG,UAAU,CAAC,MAAM,EAAqB,CAAA;IAClD,SAAS,CAAC,GAAG,CACX,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,GAAG,CAAC,CAAC,CAAC;SACN,OAAO,CAAC,GAAG,CAAC;SACZ,QAAQ,CAAC,kBAAkB,CAAC,EAC7B,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC;SACzB,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,CAAC,4DAA4D,CAAC;SACtE,QAAQ,EAAE;SACV,OAAO,CAAC,aAAa,CAAC,CAC1B;SACA,GAAG,CACF,QAAQ,E
ACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,0DAA0D,CAAC,CACxE;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,yIAAyI,CAAC,CACvJ;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,6OAA6O,CAAC,CAC3P;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC,2CAA2C,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAC1F;SACA,GAAG,CACF,cAAc,EACd,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC,mCAAmC,CAAC,CACjD;SACA,GAAG,CACF,OAAO,EACP,CAAC,CAAC,KAAK,CACL;QACE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;QACnB,CAAC,CAAC,MAAM,EAAE;KACX,CAAC,CAAC,QAAQ,EAAE;SACZ,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC;SAC9B,QAAQ,CAAC,4FAA4F,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;SACvI,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACjE;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;SAChB,QAAQ,EAAE;SACV,QAAQ,CAAC,gGAAgG,CAAC,CAC9G;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,yBAAyB,CAAC,CACvC;SACA,GAAG,CACF,OAAO,EACP,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,sDAAsD,WAAW,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAC9F;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,YAAY,CAAC;SACrB,QAAQ,CAAC,+CAA+C,CAAC,CAC7D;SACA,GAAG,CACF,MAAM,EACN,KAAK;SACF,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC;SACpB,QAAQ,CAAC;YACN,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC;YAC9B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;YACzB,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC;YAC7B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC;OAC/B,CAAC,CACH;SACA,GAAG,CACF,UAAU,EACV,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,CAAC,CAAC;SACV,QAAQ,CAAC,mCAAmC,CAAC,CACjD;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,gEAAgE,CAAC,CAC9E;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,oDAAoD,CAAC,CAClE;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,6EAA6E,CAAC,CAC3F;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,QAAQ,EAAE;SACV,QAAQ,CAAC,iBAAiB,CAAC,CAC/B;SACA,GAAG,CACF,aAAa,EACb,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,mBAAmB,EAAE,CAAC;SAC9B,QAAQ,CAAC,0GAA0G,CAAC,CACxH;SACA,GAAG,CACF,MAAM,EACN,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,iBAAiB,CAAC;SAC1B,QAAQ,CAAC,mBAAmB,CAAC,CACjC;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,MAAM,EAAE;SACP,OAAO,CAAC,SAAS,CAAC;SAClB,QAAQ,CAAC,0BAA0B,CAAC,CACxC;QACD,wCAAwC;QACxC,oFAAoF;SACnF,GAAG,CACF,WAAW,EACX,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC;SAC7B,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC,CACf;SACA,GAAG,CACF,SAAS,EACT,CAAC,CAAC,KAAK,CAAC;QACN,CAAC,CAAC,MAAM,EAAE;QACV,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;QAClB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;QACnB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC;KACtB,CAAC;SACC,QAAQ,EAAE;SACV,OAAO,CAAC,EAAE,CAAC;SACX,QAAQ,CAAC;;;uBAGK,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC;SACvC,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE;QACjB,IAAG,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAC,CAAC;YACnD,OAAO,GAAG,CAAA;QACZ,CAAC;QACD,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAA;QAClD,OAAO,GAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;QAC9D,OAAO,OAAO,CAAA;IAChB,CAAC,CAAC,CACL;SACA,GAAG,CACF,KAAK,EACL,CAAC,CAAC,KAAK,CAAC;QACN,CAAC,CAAC,OAAO,EAAE;QACX,CAAC,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE
,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,MAAM,CAAC;KAC5D,CAAC;SACC,QAAQ,EAAE;SACV,OAAO,CAAC,KAAK,CAAC;SACd,QAAQ,CAAC,8DAA8D,CAAC,CAC5E;SACA,GAAG,CACF,QAAQ,EACR,CAAC,CAAC,GAAG,EAAE;SACJ,QAAQ,EAAE;SACV,OAAO,CAAC,IAAI,CAAC;SACb,QAAQ,CAAC,mCAAmC,CAAC;SAC7C,SAAS,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,iBAAiB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CACrE,CAAC;IACJ,OAAO,SAAS,CAAC,IAAI,EAAE;SACpB,WAAW,EAAE;SACb,QAAQ,CAAC,cAAc,CAAC,CAAA;AAC7B,CAAC,CAAA;AAGD,MAAM,CAAC,MAAM,KAAK,GAAG,GAAG,EAAE;IACxB,mBAAmB,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,kBAAkB,CAAC,CAAA;IAC1D,mBAAmB,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,+BAA+B,CAAC,CAAC,CAAC,CAAA;IAC9F,OAAO,EAAE,CAAA;AACX,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,OAAO,GAAG,GAAG,EAAE;IAC1B,KAAK,CAAC,CAAC,aAAa,EAAE,CAAC,EAAE,aAAa,EAAE,MAAM,EAAE,EAAE,CAAC,CAAA;IACnD,OAAO,CAAC,gBAAgB,EAAE,SAAS,CAAC,WAAW,EAAE,CAAC,CAAA;AACpD,CAAC,CAAA"} ;// ./dist-in/commands/help.js @@ -213138,7 +215112,7 @@ const CONFIG_TEMPLATE = { "api_key": "YOUR_GOOGLE_API_KEY" } }; -const init = async (argv) => { +const commands_init_init = async (argv) => { const logger = new dist_esm_Logger({ hideLogPositionForProduction: true, maskPlaceholder: '***', @@ -213180,11 +215154,49 @@ const init = async (argv) => { + +const build_dirname = external_node_path_namespaceObject.dirname((0,external_node_url_namespaceObject.fileURLToPath)("file:///C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/dist-in/commands/build.js")); +const generateModelEnum = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL`; + const enumContent = `export enum ${enumName} { +${models.map(model => ` MODEL_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; +const generateModelEnumFree = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL_FREE`; + const freeModels = models.filter(model => { + if (!model.pricing) + return false; + const pricing = model.pricing; + return ((pricing.prompt === 0 || pricing.prompt === "0") && + (pricing.completion === 0 || pricing.completion === "0") && + (pricing.image === 0 || pricing.image === "0")); + }); + const enumContent = `export enum ${enumName} { +${freeModels.map(model => ` MODEL_FREE_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; +const generateModelEnumTools = (models, provider) => { + const enumName = `E_${provider.toUpperCase()}_MODEL`; + const toolModels = models.filter(model => { + if (!model.top_provider) + return false; + return model.top_provider.supports_functions === true || + model.top_provider.supports_function_calling === true || + model.top_provider.supports_tools === true; + }); + const enumContent = `export enum ${enumName} { +${toolModels.map(model => ` MODEL_TOOLS_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}`; + return enumContent; +}; const build = async () => { - const examplesSrc = external_node_path_namespaceObject.resolve(__dirname, '../docs_/examples.md'); + const examplesSrc = external_node_path_namespaceObject.resolve(build_dirname, '../docs_/examples.md'); if (exists_sync(examplesSrc)) { const examples = read_sync(examplesSrc, 'string') || ''; - const examplesPath = external_node_path_namespaceObject.resolve(__dirname, '../src/docs-internal/examples.ts'); + const examplesPath = external_node_path_namespaceObject.resolve(build_dirname, '../src/docs-internal/examples.ts'); write_sync(examplesPath, `export 
const examples = ${JSON.stringify(examples)}`); dist_in_logger.info(`Examples file generated " ${examplesPath}`); } @@ -213199,15 +215211,31 @@ const build = async () => { name: model.name, pricing: model.pricing, context: model.context, - created: model.created + created: model.created, + top_provider: model.top_provider }; }); - const modelsOpenAIPath = external_node_path_namespaceObject.resolve(__dirname, '../src/models/cache/openai.ts'); + // Generate model enums + const openAIEnumPath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openai-models.ts'); + const openRouterEnumPath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openrouter-models.ts'); + const openAIEnumFreePath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openai-models-free.ts'); + const openRouterEnumFreePath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openrouter-models-free.ts'); + const openAIEnumToolsPath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openai-models-tools.ts'); + const openRouterEnumToolsPath = external_node_path_namespaceObject.resolve(build_dirname, '../../src/models/cache/openrouter-models-tools.ts'); + write_sync(openAIEnumPath, generateModelEnum(modelsOpenAI, 'OpenAI')); + write_sync(openRouterEnumPath, generateModelEnum(modelsOpenRouter, 'OpenRouter')); + write_sync(openAIEnumFreePath, generateModelEnumFree(modelsOpenAI, 'OpenAI')); + write_sync(openRouterEnumFreePath, generateModelEnumFree(modelsOpenRouter, 'OpenRouter')); + write_sync(openAIEnumToolsPath, generateModelEnumTools(modelsOpenAI, 'OpenAI')); + write_sync(openRouterEnumToolsPath, generateModelEnumTools(modelsOpenRouter, 'OpenRouter')); + dist_in_logger.info('Model enums generated'); + // Write model data + const modelsOpenAIPath = external_node_path_namespaceObject.resolve(build_dirname, '../src/models/cache/openai.ts'); write_sync(modelsOpenAIPath, `export const models = ${JSON.stringify(modelsOpenAI)}`); - const modelsOpenRouterPath = external_node_path_namespaceObject.resolve(__dirname, '../src/models/cache/openrouter.ts'); + const modelsOpenRouterPath = external_node_path_namespaceObject.resolve(build_dirname, '../src/models/cache/openrouter.ts'); write_sync(modelsOpenRouterPath, `export const models = ${JSON.stringify(modelsOpenRouter)}`); }; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYnVpbGQuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvYnVpbGQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxJQUFJLE1BQU0sV0FBVyxDQUFBO0FBQzVCLE9BQU8sRUFBRSxjQUFjLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQTtBQUVsRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFDbEQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUVwRCxPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0sYUFBYSxDQUFBO0FBQ3BDLE9BQU8sRUFBRSxpQkFBaUIsRUFBRSxNQUFNLHFCQUFxQixDQUFBO0FBQ3ZELE9BQU8sRUFBRSxxQkFBcUIsRUFBRSxNQUFNLHlCQUF5QixDQUFBO0FBRS9ELE1BQU0sQ0FBQyxNQUFNLEtBQUssR0FBRyxLQUFLLElBQUksRUFBRTtJQUM5QixNQUFNLFdBQVcsR0FBSSxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxzQkFBc0IsQ0FBQyxDQUFBO0lBQ3BFLElBQUcsTUFBTSxDQUFDLFdBQVcsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxRQUFRLEdBQUcsSUFBSSxDQUFDLFdBQVcsRUFBQyxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUE7UUFDakQsTUFBTSxZQUFZLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0NBQWtDLENBQUMsQ0FBQTtRQUNoRixLQUFLLENBQUMsWUFBWSxFQUFDLDJCQUEyQixJQUFJLENBQUMsU0FBUyxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUMsQ0FBQTtRQUN6RSxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixZQUFZLEVBQUUsQ0FBQyxDQUFBO0lBQzFELENBQUM7U0FBSSxDQUFDO1FBQ0osTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsV0FBVyxFQUFFLENBQUMsQ0FBQTtJQUN4RCxDQUFDO0lBRUQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFFdEMsTUFBTSxZQUFZLEdBQUksTUFBTSxpQkFBaUIsQ0FBQyxNQUFNLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFBO0lBQ2hFLE1BQU0sZ0JBQWdCLEdBQUcsQ0FBQyxNQUFNLHFCQUFxQixFQUFFLENBQUMsQ0FBQyxHQUFHLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRTtRQUNyRSxPQUFPO1lBQ0wsRUFBRSxFQUFFLEtBQUssQ0FBQyxFQUFFO1lBQ1osSUFBSSxFQUFFLEtBQUssQ0FBQyxJQUFJO1lBQ2hCLE9BQU8sRUFBRSxLQUFLLENBQUMsT0FBTztZQUN0QixPQUFPLEVBQUUsS0FBSyxDQUFDLE9BQU87WUFDdEIsT0FBTyxFQUFFLEtBQUssQ0FBQyxPQUFPO1NBQ3ZCLENBQUE7SUFDSCxDQUFDLENBQUMsQ0FBQTtJQUNGLE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsK0JBQStCLENBQUMsQ0FBQTtJQUNqRixLQUFLLENBQUMsZ0JBQWdCLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsWUFBWSxDQUFDLEVBQUUsQ0FBQyxDQUFBO0lBRS9FLE1BQU0sb0JBQW9CLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsbUNBQW1DLENBQUMsQ0FBQTtJQUN6RixLQUFLLENBQUMsb0JBQW9CLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDLENBQUE7QUFDekYsQ0FBQyxDQUFBIn0= +//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYnVpbGQuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvYnVpbGQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxJQUFJLE1BQU0sV0FBVyxDQUFBO0FBQzVCLE9BQU8sRUFBRSxhQUFhLEVBQUUsTUFBTSxVQUFVLENBQUE7QUFDeEMsT0FBTyxFQUFFLGNBQWMsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRWxELE9BQU8sRUFBRSxJQUFJLElBQUksSUFBSSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDaEQsT0FBTyxFQUFFLElBQUksSUFBSSxLQUFLLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNsRCxPQUFPLEVBQUUsSUFBSSxJQUFJLE1BQU0sRUFBRSxNQUFNLHFCQUFxQixDQUFBO0FBRXBELE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSxhQUFhLENBQUE7QUFDcEMsT0FBTyxFQUFFLGlCQUFpQixFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDdkQsT0FBTyxFQUFFLHFCQUFxQixFQUFFLE1BQU0seUJBQXlCLENBQUE7QUFFL0QsTUFBTSxTQUFTLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxhQUFhLENBQUMsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFBO0FBRTlELE1BQU0saUJBQWlCLEdBQUcsQ0FBQyxNQUFhLEVBQUUsUUFBZ0IsRUFBRSxFQUFFO0lBQzVELE1BQU0sUUFBUSxHQUFHLEtBQUssUUFBUSxDQUFDLFdBQVcsRUFBRSxRQUFRLENBQUE7SUFDcEQsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLE1BQU0sQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxXQUFXLEtBQUssQ0FBQyxFQUFFLENBQUMsT0FBTyxDQUFDLGVBQWUsRUFBRSxHQUFHLENBQUMsQ0FBQyxXQUFXLEVBQUUsT0FBTyxLQUFLLENBQUMsRUFBRSxHQUFHLENBQUMsQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDO0VBQ2xILENBQUE7SUFDQSxPQUFPLFdBQVcsQ0FBQTtBQUNwQixDQUFDLENBQUE7QUFFRCxNQUFNLHFCQUFxQixHQUFHLENBQUMsTUFBYSxFQUFFLFFBQWdCLEVBQUUsRUFBRTtJQUNoRSxNQUFNLFFBQVEsR0FBRyxLQUFLLFFBQVEsQ0FBQyxXQUFXLEVBQUUsYUFBYSxDQUFBO0lBQ3pELE1BQU0sVUFBVSxHQUFHLE1BQU0sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLEVBQUU7UUFDdkMsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPO1lBQUUsT0FBTyxLQUFLLENBQUE7UUFDaEMsTUFBTSxPQUFPLEdBQUcsS0FBSyxDQUFDLE9BQU8sQ0FBQTtRQUM3QixPQUFPLENBQ0wsQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUMsSUFBSSxPQUFPLENBQUMsTUFBTSxLQUFLLEdBQUcsQ0FBQztZQUNoRCxDQUFDLE9BQU8sQ0FBQyxVQUFVLEtBQUssQ0FBQyxJQUFJLE9BQU8sQ0FBQyxVQUFVLEtBQUssR0FBRyxDQUFDO1lBQ3hELENBQUMsT0FBTyxDQUFDLEtBQUssS0FBSyxDQUFDLElBQUksT0FBTyxDQUFDLEtBQUssS0FBSyxHQUFHLENBQUMsQ0FDL0MsQ0FBQTtJQUNILENBQUMsQ0FBQyxDQUFBO0lBQ0YsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLFVBQVUsQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxnQkFBZ0IsS0FBSyxDQUFDLEVBQUUsQ0FBQyxPQUFPLENBQUMsZUFBZSxFQUFFLEdBQUcsQ0FBQyxDQUFDLFdBQVcsRUFBRSxPQUFPLEtBQUssQ0FBQyxFQUFFLEdBQUcsQ0FBQyxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUM7RUFDM0gsQ0FBQTtJQUNBLE9BQU8sV0FBVyxDQUFBO0FBQ3BCLENBQUMsQ0FBQTtBQUVELE1BQU0sc0JBQXNCLEdBQUcsQ0FBQyxNQUFhLEVBQUUsUUFBZ0IsRUFBRSxFQUFFO0lBQ2pFLE1BQU0sUUFBUSxHQUFHLEtBQUssUUFBUSxDQUFDLFdBQVcsRUFBRSxRQUFRLENBQUE7SUFDcEQsTUFBTSxVQUFVLEdBQUcsTUFBTSxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsRUFBRTtRQUN2QyxJQUFJLENBQUMsS0FBSyxDQUFDLFlBQVk7WUFBRSxPQUFPLEtBQUssQ0FBQTtRQUNyQyxPQUFPLEtBQUssQ0FBQyxZQUFZLENBQUMsa0JBQWtCLEtBQUssSUFBSTtZQUM5QyxLQUFLLENBQUMsWUFBWSxDQUFDLHlCQUF5QixLQUFLLElBQUk7WUFDckQsS0FBSyxDQUFDLFlBQVksQ0FBQyxjQUFjLEtBQUssSUFBSSxDQUFBO0lBQ25ELENBQUMsQ0FBQyxDQUFBO0lBQ0YsTUFBTSxXQUFXLEdBQUcsZUFBZSxRQUFRO0VBQzNDLFVBQVUsQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsQ0FBQyxpQkFBaUIsS0FBSyxDQUFDLEVBQUUsQ0FBQyxPQUFPLENBQUMsZUFBZSxFQUFFLEdBQUcsQ0FBQyxDQUFDLFdBQVcsRUFBRSxPQUFPLEtBQUssQ0FBQyxFQUFFLEdBQUcsQ0FBQyxDQUFDLElBQUksQ0FBQyxLQUFLLENBQUM7RUFDNUgsQ0FBQTtJQUNBLE9BQU8sV0FBVyxDQUFBO0FBQ3BCLENBQUMsQ0FBQTtBQUVELE1BQU0sQ0FBQyxNQUFNLEtBQUssR0FBRyxLQUFLLElBQUksRUFBRTtJQUM5QixNQUFNLFdBQVcsR0FBSSxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxzQkFBc0IsQ0FBQyxDQUFBO0lBQ3BFLElBQUcsTUFBTSxDQUFDLFdBQVcsQ0FBQyxFQUFFLENBQUM7UUFDdkIsTUFBTSxRQUFRLEdBQUcsSUFBSSxDQUFDLFdBQVcsRUFBQyxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUE7UUFDakQsTUFBTSxZQUFZLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0NBQWtDLENBQ
UMsQ0FBQTtRQUNoRixLQUFLLENBQUMsWUFBWSxFQUFDLDJCQUEyQixJQUFJLENBQUMsU0FBUyxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUMsQ0FBQTtRQUN6RSxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixZQUFZLEVBQUUsQ0FBQyxDQUFBO0lBQzFELENBQUM7U0FBSSxDQUFDO1FBQ0osTUFBTSxDQUFDLEtBQUssQ0FBQywyQkFBMkIsV0FBVyxFQUFFLENBQUMsQ0FBQTtJQUN4RCxDQUFDO0lBRUQsTUFBTSxNQUFNLEdBQUcsY0FBYyxFQUFTLENBQUE7SUFFdEMsTUFBTSxZQUFZLEdBQUksTUFBTSxpQkFBaUIsQ0FBQyxNQUFNLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFBO0lBQ2hFLE1BQU0sZ0JBQWdCLEdBQUcsQ0FBQyxNQUFNLHFCQUFxQixFQUFFLENBQUMsQ0FBQyxHQUFHLENBQUMsQ0FBQyxLQUFLLEVBQUUsRUFBRTtRQUNyRSxPQUFPO1lBQ0wsRUFBRSxFQUFFLEtBQUssQ0FBQyxFQUFFO1lBQ1osSUFBSSxFQUFFLEtBQUssQ0FBQyxJQUFJO1lBQ2hCLE9BQU8sRUFBRSxLQUFLLENBQUMsT0FBTztZQUN0QixPQUFPLEVBQUUsS0FBSyxDQUFDLE9BQU87WUFDdEIsT0FBTyxFQUFFLEtBQUssQ0FBQyxPQUFPO1lBQ3RCLFlBQVksRUFBRSxLQUFLLENBQUMsWUFBWTtTQUNqQyxDQUFBO0lBQ0gsQ0FBQyxDQUFDLENBQUE7SUFDRix1QkFBdUI7SUFDdkIsTUFBTSxjQUFjLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUseUNBQXlDLENBQUMsQ0FBQTtJQUN6RixNQUFNLGtCQUFrQixHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxFQUFFLDZDQUE2QyxDQUFDLENBQUE7SUFDakcsTUFBTSxrQkFBa0IsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSw4Q0FBOEMsQ0FBQyxDQUFBO0lBQ2xHLE1BQU0sc0JBQXNCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsa0RBQWtELENBQUMsQ0FBQTtJQUMxRyxNQUFNLG1CQUFtQixHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsU0FBUyxFQUFFLCtDQUErQyxDQUFDLENBQUE7SUFDcEcsTUFBTSx1QkFBdUIsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFNBQVMsRUFBRSxtREFBbUQsQ0FBQyxDQUFBO0lBRTVHLEtBQUssQ0FBQyxjQUFjLEVBQUUsaUJBQWlCLENBQUMsWUFBWSxFQUFFLFFBQVEsQ0FBQyxDQUFDLENBQUE7SUFDaEUsS0FBSyxDQUFDLGtCQUFrQixFQUFFLGlCQUFpQixDQUFDLGdCQUFnQixFQUFFLFlBQVksQ0FBQyxDQUFDLENBQUE7SUFDNUUsS0FBSyxDQUFDLGtCQUFrQixFQUFFLHFCQUFxQixDQUFDLFlBQVksRUFBRSxRQUFRLENBQUMsQ0FBQyxDQUFBO0lBQ3hFLEtBQUssQ0FBQyxzQkFBc0IsRUFBRSxxQkFBcUIsQ0FBQyxnQkFBZ0IsRUFBRSxZQUFZLENBQUMsQ0FBQyxDQUFBO0lBQ3BGLEtBQUssQ0FBQyxtQkFBbUIsRUFBRSxzQkFBc0IsQ0FBQyxZQUFZLEVBQUUsUUFBUSxDQUFDLENBQUMsQ0FBQTtJQUMxRSxLQUFLLENBQUMsdUJBQXVCLEVBQUUsc0JBQXNCLENBQUMsZ0JBQWdCLEVBQUUsWUFBWSxDQUFDLENBQUMsQ0FBQTtJQUN0RixNQUFNLENBQUMsSUFBSSxDQUFDLHVCQUF1QixDQUFDLENBQUE7SUFFcEMsbUJBQW1CO0lBQ25CLE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsK0JBQStCLENBQUMsQ0FBQTtJQUNqRixLQUFLLENBQUMsZ0JBQWdCLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsWUFBWSxDQUFDLEVBQUUsQ0FBQyxDQUFBO0lBRS9FLE1BQU0sb0JBQW9CLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxTQUFTLEVBQUUsbUNBQW1DLENBQUMsQ0FBQTtJQUN6RixLQUFLLENBQUMsb0JBQW9CLEVBQUMseUJBQXlCLElBQUksQ0FBQyxTQUFTLENBQUMsZ0JBQWdCLENBQUMsRUFBRSxDQUFDLENBQUE7QUFDekYsQ0FBQyxDQUFBIn0= ;// ./dist-in/commands/fetch.js @@ -213272,7 +215300,7 @@ const yargOptions = { }) }; yargs(hideBin(process.argv)) - .command('init', 'Initialize KBot configuration', (yargs) => toYargs(yargs, OptionsSchema(), yargOptions), init) + .command('init', 'Initialize KBot configuration', (yargs) => toYargs(yargs, OptionsSchema(), yargOptions), commands_init_init) .command('modify [prompt]', 'Modify an existing project', (yargs) => toYargs(yargs, OptionsSchema(), yargOptions), modify) .command('types', 'Generate types', (yargs) => { }, (argv) => zod_schema_types()) .command('build', 'Build kbot essentials', (yargs) => { }, (argv) => build()) diff --git a/packages/kbot/dist/package.json b/packages/kbot/dist/package.json index 8bc4c907..6d46d3ea 100644 --- a/packages/kbot/dist/package.json +++ b/packages/kbot/dist/package.json @@ -1,6 +1,6 @@ { "name": "@plastichub/kbot", - "version": "1.1.16", + "version": "1.1.17", "main": "main_node.js", "author": "", "license": "ISC", diff --git a/packages/kbot/package-lock.json b/packages/kbot/package-lock.json 
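Note on the new `format` option added to OptionsSchema in the zod_schema.js hunk above: the diff only shows that a truthy value is run through zodResponseFormat from the openai package (bumped to 4.91.0 in the dependency hunks below) and that null stays the default. The sketch below assumes nothing beyond that; the `Answer` schema and the variable names are illustrative, not part of the kbot API.

import { z } from "zod";
import { zodResponseFormat } from "openai/helpers/zod";

// Hypothetical Zod schema a caller might supply as the `format` option.
const Answer = z.object({
  summary: z.string(),
  score: z.number(),
});

// Mirrors the transform in the OptionsSchema hunk: a non-null value becomes an
// OpenAI response_format payload, a missing value stays null (the default).
const maybeSchema: typeof Answer | null = Answer;
const format = maybeSchema ? zodResponseFormat(maybeSchema, "format") : null;
// `format` is roughly { type: "json_schema", json_schema: { name: "format", strict: true, schema: {...} } }
// and can be forwarded to the OpenAI chat completions API for structured outputs.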
index 1200ed97..6a394076 100644
--- a/packages/kbot/package-lock.json
+++ b/packages/kbot/package-lock.json
@@ -23,7 +23,7 @@
 "marked": "14.1.4",
 "marked-terminal": "7.2.1",
 "mime-types": "2.1.35",
- "openai": "4.87.4",
+ "openai": "4.91.0",
 "p-map": "7.0.3",
 "ts-retry": "6.0.0",
 "tslog": "^4.9.3",
@@ -70,12 +70,13 @@
 "marked": "^15.0.4",
 "mime-types": "^2.1.35",
 "nodemailer": "^6.9.16",
- "openai": "^4.85.3",
+ "openai": "^4.87.4",
 "p-map": "^7.0.3",
 "rotating-file-stream": "^3.2.6",
 "screenshot-desktop": "^1.15.0",
 "showdown": "^2.1.0",
 "simple-git": "^3.27.0",
+ "supports-color": "^10.0.0",
 "tslog": "^4.9.3",
 "turndown": "^7.2.0",
 "type-fest": "^4.30.2",
@@ -4868,9 +4869,9 @@
 }
 },
 "node_modules/openai": {
- "version": "4.87.4",
- "resolved": "https://registry.npmjs.org/openai/-/openai-4.87.4.tgz",
- "integrity": "sha512-lsfM20jZY4A0lNexfoUAkfmrEXxaTXvv8OKYicpeAJUNHObpRgkvC7pxPgMnB6gc9ID8OCwzzhEhBpNy69UR7w==",
+ "version": "4.91.0",
+ "resolved": "https://registry.npmjs.org/openai/-/openai-4.91.0.tgz",
+ "integrity": "sha512-zdDg6eyvUmCP58QAW7/aPb+XdeavJ51pK6AcwZOWG5QNSLIovVz0XonRL9vARGJRmw8iImmvf2A31Q7hoh544w==",
 "license": "Apache-2.0",
 "dependencies": {
 "@types/node": "^18.11.18",
diff --git a/packages/kbot/package.json b/packages/kbot/package.json
index 1a2f9461..8808e9d1 100644
--- a/packages/kbot/package.json
+++ b/packages/kbot/package.json
@@ -39,7 +39,7 @@
 "marked": "14.1.4",
 "marked-terminal": "7.2.1",
 "mime-types": "2.1.35",
- "openai": "4.87.4",
+ "openai": "4.91.0",
 "p-map": "7.0.3",
 "ts-retry": "6.0.0",
 "tslog": "^4.9.3",
diff --git a/packages/kbot/schema.json b/packages/kbot/schema.json
index 94e899af..4273401e 100644
--- a/packages/kbot/schema.json
+++ b/packages/kbot/schema.json
@@ -81,7 +81,7 @@
 },
 "model": {
 "type": "string",
- "description": "AI model to use for processing. 
Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-1.6-large | paid\nai21/jamba-instruct | paid\nai21/jamba-1.6-mini | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\njondurbin/airoboros-l2-70b | paid\nallenai/olmo-2-0325-32b-instruct | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:beta | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\ncohere/command | paid\ncohere/command-a | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepseek/deepseek-r1-zero:free | free\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat:free | free\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-llama-70b:free | free\ndeepseek/deepseek-r1-distill-llama-8b | paid\ndeepseek/deepseek-r1-distill-qwen-1.5b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-14b:free | free\ndeepseek/deepseek-r1-distill-qwen-32b | paid\ndeepseek/deepseek-r1-distill-qwen-32b:free | free\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\ncognitivecomputations/dolphin3.0-mistral-24b:free | free\ncognitivecomputations/dolphin3.0-r1-mistral-24b:free | free\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nsao10k/fimbulvetr-11b-v2 | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.0-flash-thinking-exp-1219:free | free\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-exp-1206:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-2.0-flash-lite-preview-02-05:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | paid\ngoogle/gemini-2.0-pro-exp-02-05:free | free\ngoogle/gemini-pro-vision | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-1b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it:free | 
free\ngoogle/gemma-7b-it | paid\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nlatitudegames/wayfarer-large-70b-llama-3.3 | paid\nliquid/lfm-3b | paid\nliquid/lfm-40b | paid\nliquid/lfm-7b | paid\nallenai/llama-3.1-tulu-3-405b | paid\nmeta-llama/llama-guard-3-8b | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-2-70b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nsophosympatheia/midnight-rose-70b | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2501 | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-nemo:free | free\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-24b-instruct-2501:free | free\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmoonshotai/moonlight-16b-a3b-instruct:free | free\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:free | free\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | paid\nnousresearch/deephermes-3-llama-3-8b-preview:free | free\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-70b | 
paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-70b-instruct:free | free\nopen-r1/olympiccoder-32b:free | free\nopen-r1/olympiccoder-7b:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.5-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-large-128k-chat | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-chat | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/r1-1776 | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen2.5-32b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct:free | free\nqwen/qwen-2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwq-32b | paid\nqwen/qwq-32b:free | free\nqwen/qwq-32b-preview | paid\nqwen/qwq-32b-preview:free | free\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-72b-instruct:free | free\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct:free | free\nrekaai/reka-flash-3:free | free\nundi95/remm-slerp-l2-13b | paid\nthedrummer/rocinante-12b | paid\nsophosympatheia/rogue-rose-103b-v0.2:free | free\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nsteelskull/l3.3-electra-r1-70b | paid\ntokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3 | paid\nthedrummer/anubis-pro-105b-v1 | paid\nthedrummer/skyfall-36b-v2 | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI 
models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.5-preview\ngpt-4.5-preview-2025-02-27\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\no1\no1-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\no3-mini\no3-mini-2025-01-31\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n" + "description": "AI model to use for processing. Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-1.6-large | paid\nai21/jamba-instruct | paid\nai21/jamba-1.6-mini | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\njondurbin/airoboros-l2-70b | paid\nallenai/molmo-7b-d:free | free\nallenai/olmo-2-0325-32b-instruct | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:beta | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\nbytedance-research/ui-tars-72b:free | free\ncohere/command | paid\ncohere/command-a | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepseek/deepseek-r1-zero:free | free\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat:free | free\ndeepseek/deepseek-chat-v3-0324 | paid\ndeepseek/deepseek-chat-v3-0324:free | 
free\ndeepseek/deepseek-v3-base:free | free\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-llama-70b:free | free\ndeepseek/deepseek-r1-distill-llama-8b | paid\ndeepseek/deepseek-r1-distill-qwen-1.5b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-14b:free | free\ndeepseek/deepseek-r1-distill-qwen-32b | paid\ndeepseek/deepseek-r1-distill-qwen-32b:free | free\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\ncognitivecomputations/dolphin3.0-mistral-24b:free | free\ncognitivecomputations/dolphin3.0-r1-mistral-24b:free | free\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nsao10k/fimbulvetr-11b-v2 | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.0-flash-thinking-exp-1219:free | free\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | paid\ngoogle/gemini-2.0-pro-exp-02-05:free | free\ngoogle/gemini-2.5-pro-exp-03-25:free | free\ngoogle/gemini-pro-vision | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/gemma-3-12b-it | paid\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-1b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it | paid\ngoogle/gemma-3-4b-it:free | free\ngoogle/gemma-7b-it | paid\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nlatitudegames/wayfarer-large-70b-llama-3.3 | paid\nliquid/lfm-3b | paid\nliquid/lfm-40b | paid\nliquid/lfm-7b | paid\nmeta-llama/llama-guard-3-8b | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-2-70b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | 
paid\nsophosympatheia/midnight-rose-70b | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2501 | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistral/ministral-8b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-nemo:free | free\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-24b-instruct-2501:free | free\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mistral-small-3.1-24b-instruct:free | free\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmoonshotai/moonlight-16b-a3b-instruct:free | free\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:free | free\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | paid\nnousresearch/deephermes-3-llama-3-8b-preview:free | free\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-70b-instruct:free | free\nopen-r1/olympiccoder-32b:free | free\nopen-r1/olympiccoder-7b:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.5-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenai/o1-pro | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/r1-1776 | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | 
paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen2.5-32b-instruct | paid\nqwen/qwen2.5-vl-32b-instruct:free | free\nqwen/qwen2.5-vl-3b-instruct:free | free\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct:free | free\nqwen/qwen-2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct:free | free\nqwen/qwq-32b | paid\nqwen/qwq-32b:free | free\nqwen/qwq-32b-preview | paid\nqwen/qwq-32b-preview:free | free\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-72b-instruct:free | free\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct:free | free\nfeatherless/qwerky-72b:free | free\nrekaai/reka-flash-3:free | free\nundi95/remm-slerp-l2-13b | paid\nthedrummer/rocinante-12b | paid\nsophosympatheia/rogue-rose-103b-v0.2:free | free\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nsteelskull/l3.3-electra-r1-70b | paid\ntokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3 | paid\nthedrummer/anubis-pro-105b-v1 | paid\nthedrummer/skyfall-36b-v2 | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nscb10x/llama3.1-typhoon2-70b-instruct | paid\nscb10x/llama3.1-typhoon2-8b-instruct | paid\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.5-preview\ngpt-4.5-preview-2025-02-27\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-mini-transcribe\ngpt-4o-mini-tts\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\ngpt-4o-transcribe\no1\no1-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\no1-pro\no1-pro-2025-03-19\no3-mini\no3-mini-2025-01-31\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n" }, "router": { "type": "string", @@ -186,6 +186,10 @@ ], "default": false, "description": "Dry run - only write out parameters without making API calls" + }, + "format": { + "default": null, + "description": "Zod schema for structured outputs" } }, "additionalProperties": true, diff --git a/packages/kbot/schema_ui.json 
b/packages/kbot/schema_ui.json index 4b25261d..9a1ba94c 100644 --- a/packages/kbot/schema_ui.json +++ b/packages/kbot/schema_ui.json @@ -63,7 +63,7 @@ "ui:title": "Api_key" }, "model": { - "ui:description": "AI model to use for processing. Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-1.6-large | paid\nai21/jamba-instruct | paid\nai21/jamba-1.6-mini | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\njondurbin/airoboros-l2-70b | paid\nallenai/olmo-2-0325-32b-instruct | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:beta | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\ncohere/command | paid\ncohere/command-a | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepseek/deepseek-r1-zero:free | free\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat:free | free\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-llama-70b:free | free\ndeepseek/deepseek-r1-distill-llama-8b | paid\ndeepseek/deepseek-r1-distill-qwen-1.5b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-14b:free | free\ndeepseek/deepseek-r1-distill-qwen-32b | paid\ndeepseek/deepseek-r1-distill-qwen-32b:free | free\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\ncognitivecomputations/dolphin3.0-mistral-24b:free | free\ncognitivecomputations/dolphin3.0-r1-mistral-24b:free | free\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nsao10k/fimbulvetr-11b-v2 | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.0-flash-thinking-exp-1219:free | free\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-exp-1206:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-2.0-flash-lite-preview-02-05:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | paid\ngoogle/gemini-2.0-pro-exp-02-05:free | free\ngoogle/gemini-pro-vision | 
paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-1b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it:free | free\ngoogle/gemma-7b-it | paid\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nlatitudegames/wayfarer-large-70b-llama-3.3 | paid\nliquid/lfm-3b | paid\nliquid/lfm-40b | paid\nliquid/lfm-7b | paid\nallenai/llama-3.1-tulu-3-405b | paid\nmeta-llama/llama-guard-3-8b | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-2-70b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nsophosympatheia/midnight-rose-70b | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2501 | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-nemo:free | free\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-24b-instruct-2501:free | free\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmoonshotai/moonlight-16b-a3b-instruct:free | free\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:free | free\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | 
paid\nnousresearch/deephermes-3-llama-3-8b-preview:free | free\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-70b-instruct:free | free\nopen-r1/olympiccoder-32b:free | free\nopen-r1/olympiccoder-7b:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.5-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-large-128k-chat | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-chat | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/r1-1776 | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen2.5-32b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct:free | free\nqwen/qwen-2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwq-32b | paid\nqwen/qwq-32b:free | free\nqwen/qwq-32b-preview | paid\nqwen/qwq-32b-preview:free | free\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-72b-instruct:free | free\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct:free | free\nrekaai/reka-flash-3:free | free\nundi95/remm-slerp-l2-13b | paid\nthedrummer/rocinante-12b | paid\nsophosympatheia/rogue-rose-103b-v0.2:free | free\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nsteelskull/l3.3-electra-r1-70b | paid\ntokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3 | paid\nthedrummer/anubis-pro-105b-v1 | paid\nthedrummer/skyfall-36b-v2 | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI 
models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.5-preview\ngpt-4.5-preview-2025-02-27\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\no1\no1-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\no3-mini\no3-mini-2025-01-31\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n", + "ui:description": "AI model to use for processing. Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-1.6-large | paid\nai21/jamba-instruct | paid\nai21/jamba-1.6-mini | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\njondurbin/airoboros-l2-70b | paid\nallenai/molmo-7b-d:free | free\nallenai/olmo-2-0325-32b-instruct | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:beta | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\nbytedance-research/ui-tars-72b:free | free\ncohere/command | paid\ncohere/command-a | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepseek/deepseek-r1-zero:free | free\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat:free | free\ndeepseek/deepseek-chat-v3-0324 | paid\ndeepseek/deepseek-chat-v3-0324:free | 
free\ndeepseek/deepseek-v3-base:free | free\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-llama-70b:free | free\ndeepseek/deepseek-r1-distill-llama-8b | paid\ndeepseek/deepseek-r1-distill-qwen-1.5b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-14b:free | free\ndeepseek/deepseek-r1-distill-qwen-32b | paid\ndeepseek/deepseek-r1-distill-qwen-32b:free | free\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\ncognitivecomputations/dolphin3.0-mistral-24b:free | free\ncognitivecomputations/dolphin3.0-r1-mistral-24b:free | free\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nsao10k/fimbulvetr-11b-v2 | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.0-flash-thinking-exp-1219:free | free\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | paid\ngoogle/gemini-2.0-pro-exp-02-05:free | free\ngoogle/gemini-2.5-pro-exp-03-25:free | free\ngoogle/gemini-pro-vision | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/gemma-3-12b-it | paid\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-1b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it | paid\ngoogle/gemma-3-4b-it:free | free\ngoogle/gemma-7b-it | paid\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nlatitudegames/wayfarer-large-70b-llama-3.3 | paid\nliquid/lfm-3b | paid\nliquid/lfm-40b | paid\nliquid/lfm-7b | paid\nmeta-llama/llama-guard-3-8b | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-2-70b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | 
paid\nsophosympatheia/midnight-rose-70b | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2501 | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistral/ministral-8b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-nemo:free | free\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-24b-instruct-2501:free | free\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mistral-small-3.1-24b-instruct:free | free\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmoonshotai/moonlight-16b-a3b-instruct:free | free\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:free | free\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | paid\nnousresearch/deephermes-3-llama-3-8b-preview:free | free\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-70b-instruct:free | free\nopen-r1/olympiccoder-32b:free | free\nopen-r1/olympiccoder-7b:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.5-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenai/o1-pro | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/r1-1776 | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | 
paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen2.5-32b-instruct | paid\nqwen/qwen2.5-vl-32b-instruct:free | free\nqwen/qwen2.5-vl-3b-instruct:free | free\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct:free | free\nqwen/qwen-2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct:free | free\nqwen/qwq-32b | paid\nqwen/qwq-32b:free | free\nqwen/qwq-32b-preview | paid\nqwen/qwq-32b-preview:free | free\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-72b-instruct:free | free\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct:free | free\nfeatherless/qwerky-72b:free | free\nrekaai/reka-flash-3:free | free\nundi95/remm-slerp-l2-13b | paid\nthedrummer/rocinante-12b | paid\nsophosympatheia/rogue-rose-103b-v0.2:free | free\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nsteelskull/l3.3-electra-r1-70b | paid\ntokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3 | paid\nthedrummer/anubis-pro-105b-v1 | paid\nthedrummer/skyfall-36b-v2 | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nscb10x/llama3.1-typhoon2-70b-instruct | paid\nscb10x/llama3.1-typhoon2-8b-instruct | paid\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.5-preview\ngpt-4.5-preview-2025-02-27\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-mini-transcribe\ngpt-4o-mini-tts\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\ngpt-4o-transcribe\no1\no1-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\no1-pro\no1-pro-2025-03-19\no3-mini\no3-mini-2025-01-31\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n", "ui:title": "Model" }, "router": { @@ -125,5 +125,10 @@ "ui:description": "Dry run - only write out parameters without making API calls", "ui:title": "Dry", "ui:placeholder": false + }, + "format": { + "ui:description": "Zod schema for structured outputs", + "ui:title": "Format", + "ui:placeholder": null } } \ No 
newline at end of file diff --git a/packages/kbot/src/commands/build.ts b/packages/kbot/src/commands/build.ts index b5c24253..5a1ce463 100644 --- a/packages/kbot/src/commands/build.ts +++ b/packages/kbot/src/commands/build.ts @@ -1,4 +1,5 @@ import path from 'node:path' +import { fileURLToPath } from 'node:url' import { CONFIG_DEFAULT } from '@polymech/commons' import { sync as read } from '@polymech/fs/read' @@ -9,6 +10,47 @@ import { logger } from '../index.js' import { fetchOpenAIModels } from '../models/openai.js' import { fetchOpenRouterModels } from '../models/openrouter.js' +const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +const generateModelEnum = (models: any[], provider: string) => { + const enumName = `E_${provider.toUpperCase()}_MODEL` + const enumContent = `export enum ${enumName} { +${models.map(model => ` MODEL_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}` + return enumContent +} + +const generateModelEnumFree = (models: any[], provider: string) => { + const enumName = `E_${provider.toUpperCase()}_MODEL_FREE` + const freeModels = models.filter(model => { + if (!model.pricing) return false + const pricing = model.pricing + return ( + (pricing.prompt === 0 || pricing.prompt === "0") && + (pricing.completion === 0 || pricing.completion === "0") && + (pricing.image === 0 || pricing.image === "0") + ) + }) + const enumContent = `export enum ${enumName} { +${freeModels.map(model => ` MODEL_FREE_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}` + return enumContent +} + +const generateModelEnumTools = (models: any[], provider: string) => { + const enumName = `E_${provider.toUpperCase()}_MODEL` + const toolModels = models.filter(model => { + if (!model.top_provider) return false + return model.top_provider.supports_functions === true || + model.top_provider.supports_function_calling === true || + model.top_provider.supports_tools === true + }) + const enumContent = `export enum ${enumName} { +${toolModels.map(model => ` MODEL_TOOLS_${model.id.replace(/[^a-zA-Z0-9]/g, '_').toUpperCase()} = "${model.id}"`).join(',\n')} +}` + return enumContent +} + export const build = async () => { const examplesSrc = path.resolve(__dirname, '../docs_/examples.md') if(exists(examplesSrc)) { @@ -29,9 +71,27 @@ export const build = async () => { name: model.name, pricing: model.pricing, context: model.context, - created: model.created + created: model.created, + top_provider: model.top_provider } }) + // Generate model enums + const openAIEnumPath = path.resolve(__dirname, '../../src/models/cache/openai-models.ts') + const openRouterEnumPath = path.resolve(__dirname, '../../src/models/cache/openrouter-models.ts') + const openAIEnumFreePath = path.resolve(__dirname, '../../src/models/cache/openai-models-free.ts') + const openRouterEnumFreePath = path.resolve(__dirname, '../../src/models/cache/openrouter-models-free.ts') + const openAIEnumToolsPath = path.resolve(__dirname, '../../src/models/cache/openai-models-tools.ts') + const openRouterEnumToolsPath = path.resolve(__dirname, '../../src/models/cache/openrouter-models-tools.ts') + + write(openAIEnumPath, generateModelEnum(modelsOpenAI, 'OpenAI')) + write(openRouterEnumPath, generateModelEnum(modelsOpenRouter, 'OpenRouter')) + write(openAIEnumFreePath, generateModelEnumFree(modelsOpenAI, 'OpenAI')) + write(openRouterEnumFreePath, generateModelEnumFree(modelsOpenRouter, 'OpenRouter')) + write(openAIEnumToolsPath, generateModelEnumTools(modelsOpenAI, 
'OpenAI')) + write(openRouterEnumToolsPath, generateModelEnumTools(modelsOpenRouter, 'OpenRouter')) + logger.info('Model enums generated') + + // Write model data const modelsOpenAIPath = path.resolve(__dirname, '../src/models/cache/openai.ts') write(modelsOpenAIPath,`export const models = ${JSON.stringify(modelsOpenAI)}`) diff --git a/packages/kbot/src/commands/run-tools.ts b/packages/kbot/src/commands/run-tools.ts index e7df6bb2..c2e90f06 100644 --- a/packages/kbot/src/commands/run-tools.ts +++ b/packages/kbot/src/commands/run-tools.ts @@ -2,7 +2,7 @@ import OpenAI from 'openai' import { ChatCompletionMessage, ChatCompletionMessageParam } from 'openai/resources/index.mjs' import { ChatCompletionToolRunnerParams } from 'openai/lib/ChatCompletionRunner' -import { IKBotTask } from '@polymech/ai-tools' +import { IKBotTask, ICollector } from '@polymech/ai-tools' import { content } from '../utils/content.js' import { logger } from '../index.js' @@ -37,16 +37,16 @@ export const runTools = async (client: OpenAI, params: any, options: IKBotTask) try { runner = await client.beta.chat.completions.runTools(params as ChatCompletionToolRunnerParams) .on('message', (message: ChatCompletionMessageParam) => { - options.collector.onMessage(logMessage(message, sessionId, options.prompt)) + (options.collector as ICollector).onMessage(logMessage(message, sessionId, options.prompt)) }) .on('functionCall', (tool: ChatCompletionMessage.FunctionCall) => { - return options.collector.onToolCall(logMessage(tool, sessionId, options.prompt)) + return (options.collector as ICollector).onToolCall(logMessage(tool, sessionId, options.prompt)) }) .on('functionCallResult', (a) => { - options.collector.onFunctionCallResult(a) + (options.collector as ICollector).onFunctionCallResult(a) }) - .on('chatCompletion', options.collector.onChatCompletion) - .on('content', options.collector.onContent) + .on('chatCompletion', (options.collector as ICollector).onChatCompletion) + .on('content', (options.collector as ICollector).onContent) } catch (e) { logger.trace(e) } diff --git a/packages/kbot/src/index.ts b/packages/kbot/src/index.ts index e0be0bac..91e03d61 100644 --- a/packages/kbot/src/index.ts +++ b/packages/kbot/src/index.ts @@ -37,4 +37,9 @@ export const assistant_supported: Record = { export * from './types.js' export * from './zod_types.js' export * from './zod_schema.js' + +export { E_OPENAI_MODEL } from './models/cache/openai-models.js' +export { E_OPENROUTER_MODEL } from './models/cache/openrouter-models.js' +export { E_OPENROUTER_MODEL_FREE } from './models/cache/openrouter-models-free.js' + export { IKBotTask } from '@polymech/ai-tools' \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openai-models-free.ts b/packages/kbot/src/models/cache/openai-models-free.ts new file mode 100644 index 00000000..7b9873a7 --- /dev/null +++ b/packages/kbot/src/models/cache/openai-models-free.ts @@ -0,0 +1,3 @@ +export enum E_OPENAI_MODEL_FREE { + +} \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openai-models-tools.ts b/packages/kbot/src/models/cache/openai-models-tools.ts new file mode 100644 index 00000000..e57afaba --- /dev/null +++ b/packages/kbot/src/models/cache/openai-models-tools.ts @@ -0,0 +1,3 @@ +export enum E_OPENAI_MODEL { + +} \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openai-models.ts b/packages/kbot/src/models/cache/openai-models.ts new file mode 100644 index 00000000..1815c454 --- /dev/null +++ b/packages/kbot/src/models/cache/openai-models.ts 
@@ -0,0 +1,65 @@ +export enum E_OPENAI_MODEL { + MODEL_GPT_4O_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-realtime-preview-2024-12-17", + MODEL_GPT_4O_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-audio-preview-2024-12-17", + MODEL_DALL_E_3 = "dall-e-3", + MODEL_DALL_E_2 = "dall-e-2", + MODEL_GPT_4O_AUDIO_PREVIEW_2024_10_01 = "gpt-4o-audio-preview-2024-10-01", + MODEL_O3_MINI = "o3-mini", + MODEL_O3_MINI_2025_01_31 = "o3-mini-2025-01-31", + MODEL_GPT_4O_MINI_REALTIME_PREVIEW_2024_12_17 = "gpt-4o-mini-realtime-preview-2024-12-17", + MODEL_GPT_4O_MINI_REALTIME_PREVIEW = "gpt-4o-mini-realtime-preview", + MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01 = "gpt-4o-realtime-preview-2024-10-01", + MODEL_GPT_4O_TRANSCRIBE = "gpt-4o-transcribe", + MODEL_GPT_4O_MINI_TRANSCRIBE = "gpt-4o-mini-transcribe", + MODEL_GPT_4O_REALTIME_PREVIEW = "gpt-4o-realtime-preview", + MODEL_BABBAGE_002 = "babbage-002", + MODEL_GPT_4O_MINI_TTS = "gpt-4o-mini-tts", + MODEL_TTS_1_HD_1106 = "tts-1-hd-1106", + MODEL_TEXT_EMBEDDING_3_LARGE = "text-embedding-3-large", + MODEL_GPT_4 = "gpt-4", + MODEL_TEXT_EMBEDDING_ADA_002 = "text-embedding-ada-002", + MODEL_TTS_1_HD = "tts-1-hd", + MODEL_GPT_4O_MINI_AUDIO_PREVIEW = "gpt-4o-mini-audio-preview", + MODEL_GPT_4O_AUDIO_PREVIEW = "gpt-4o-audio-preview", + MODEL_O1_PREVIEW_2024_09_12 = "o1-preview-2024-09-12", + MODEL_GPT_3_5_TURBO_INSTRUCT_0914 = "gpt-3.5-turbo-instruct-0914", + MODEL_GPT_4O_MINI_SEARCH_PREVIEW = "gpt-4o-mini-search-preview", + MODEL_TTS_1_1106 = "tts-1-1106", + MODEL_DAVINCI_002 = "davinci-002", + MODEL_GPT_3_5_TURBO_1106 = "gpt-3.5-turbo-1106", + MODEL_GPT_4_TURBO = "gpt-4-turbo", + MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct", + MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo", + MODEL_CHATGPT_4O_LATEST = "chatgpt-4o-latest", + MODEL_GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-mini-search-preview-2025-03-11", + MODEL_GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", + MODEL_WHISPER_1 = "whisper-1", + MODEL_GPT_3_5_TURBO_0125 = "gpt-3.5-turbo-0125", + MODEL_GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", + MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k", + MODEL_GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-2024-04-09", + MODEL_GPT_4_1106_PREVIEW = "gpt-4-1106-preview", + MODEL_O1_PREVIEW = "o1-preview", + MODEL_GPT_4_0613 = "gpt-4-0613", + MODEL_GPT_4O_SEARCH_PREVIEW = "gpt-4o-search-preview", + MODEL_GPT_4_5_PREVIEW = "gpt-4.5-preview", + MODEL_GPT_4_5_PREVIEW_2025_02_27 = "gpt-4.5-preview-2025-02-27", + MODEL_GPT_4O_SEARCH_PREVIEW_2025_03_11 = "gpt-4o-search-preview-2025-03-11", + MODEL_OMNI_MODERATION_LATEST = "omni-moderation-latest", + MODEL_TTS_1 = "tts-1", + MODEL_OMNI_MODERATION_2024_09_26 = "omni-moderation-2024-09-26", + MODEL_TEXT_EMBEDDING_3_SMALL = "text-embedding-3-small", + MODEL_GPT_4O = "gpt-4o", + MODEL_GPT_4O_MINI = "gpt-4o-mini", + MODEL_GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", + MODEL_GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", + MODEL_GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview", + MODEL_O1_MINI = "o1-mini", + MODEL_GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = "gpt-4o-mini-audio-preview-2024-12-17", + MODEL_O1_MINI_2024_09_12 = "o1-mini-2024-09-12", + MODEL_GPT_4_0125_PREVIEW = "gpt-4-0125-preview", + MODEL_O1 = "o1", + MODEL_O1_2024_12_17 = "o1-2024-12-17", + MODEL_O1_PRO = "o1-pro", + MODEL_O1_PRO_2025_03_19 = "o1-pro-2025-03-19" +} \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openai.ts b/packages/kbot/src/models/cache/openai.ts index c5c854aa..ee8754e6 100644 --- a/packages/kbot/src/models/cache/openai.ts +++ b/packages/kbot/src/models/cache/openai.ts @@ 
-1 +1 @@ -export const models = [{"id":"gpt-4o-audio-preview-2024-10-01","object":"model","created":1727389042,"owned_by":"system"},{"id":"gpt-4o-realtime-preview","object":"model","created":1727659998,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-10-01","object":"model","created":1727131766,"owned_by":"system"},{"id":"dall-e-2","object":"model","created":1698798177,"owned_by":"system"},{"id":"gpt-4o-2024-08-06","object":"model","created":1722814719,"owned_by":"system"},{"id":"gpt-4-turbo","object":"model","created":1712361441,"owned_by":"system"},{"id":"gpt-4-1106-preview","object":"model","created":1698957206,"owned_by":"system"},{"id":"gpt-4o","object":"model","created":1715367049,"owned_by":"system"},{"id":"gpt-3.5-turbo","object":"model","created":1677610602,"owned_by":"openai"},{"id":"gpt-3.5-turbo-0125","object":"model","created":1706048358,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct","object":"model","created":1692901427,"owned_by":"system"},{"id":"gpt-4-1106-vision-preview","object":"model","created":1711473033,"owned_by":"system"},{"id":"babbage-002","object":"model","created":1692634615,"owned_by":"system"},{"id":"whisper-1","object":"model","created":1677532384,"owned_by":"openai-internal"},{"id":"dall-e-3","object":"model","created":1698785189,"owned_by":"system"},{"id":"text-embedding-3-small","object":"model","created":1705948997,"owned_by":"system"},{"id":"gpt-3.5-turbo-16k","object":"model","created":1683758102,"owned_by":"openai-internal"},{"id":"gpt-4-0125-preview","object":"model","created":1706037612,"owned_by":"system"},{"id":"gpt-4-turbo-preview","object":"model","created":1706037777,"owned_by":"system"},{"id":"chatgpt-4o-latest","object":"model","created":1723515131,"owned_by":"system"},{"id":"omni-moderation-latest","object":"model","created":1731689265,"owned_by":"system"},{"id":"gpt-4o-2024-05-13","object":"model","created":1715368132,"owned_by":"system"},{"id":"o1-preview-2024-09-12","object":"model","created":1725648865,"owned_by":"system"},{"id":"omni-moderation-2024-09-26","object":"model","created":1732734466,"owned_by":"system"},{"id":"tts-1-hd-1106","object":"model","created":1699053533,"owned_by":"system"},{"id":"o1-preview","object":"model","created":1725648897,"owned_by":"system"},{"id":"gpt-4","object":"model","created":1687882411,"owned_by":"openai"},{"id":"gpt-4-0613","object":"model","created":1686588896,"owned_by":"openai"},{"id":"tts-1-hd","object":"model","created":1699046015,"owned_by":"system"},{"id":"gpt-4-vision-preview","object":"model","created":1698894917,"owned_by":"system"},{"id":"text-embedding-ada-002","object":"model","created":1671217299,"owned_by":"openai-internal"},{"id":"gpt-3.5-turbo-1106","object":"model","created":1698959748,"owned_by":"system"},{"id":"gpt-4o-audio-preview","object":"model","created":1727460443,"owned_by":"system"},{"id":"tts-1","object":"model","created":1681940951,"owned_by":"openai-internal"},{"id":"tts-1-1106","object":"model","created":1699053241,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct-0914","object":"model","created":1694122472,"owned_by":"system"},{"id":"davinci-002","object":"model","created":1692634301,"owned_by":"system"},{"id":"text-embedding-3-large","object":"model","created":1705953180,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-12-17","object":"model","created":1733945430,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview","object":"model","created":1734387380,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview-2024-12-17","object":"model","crea
ted":1734112601,"owned_by":"system"},{"id":"o1-mini","object":"model","created":1725649008,"owned_by":"system"},{"id":"gpt-4o-2024-11-20","object":"model","created":1731975040,"owned_by":"system"},{"id":"o1-mini-2024-09-12","object":"model","created":1725648979,"owned_by":"system"},{"id":"gpt-4o-mini-2024-07-18","object":"model","created":1721172717,"owned_by":"system"},{"id":"gpt-4o-mini","object":"model","created":1721172741,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-12-17","object":"model","created":1734034239,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview","object":"model","created":1734387424,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview-2024-12-17","object":"model","created":1734115920,"owned_by":"system"},{"id":"gpt-4-turbo-2024-04-09","object":"model","created":1712601677,"owned_by":"system"}] \ No newline at end of file +export const models = [{"id":"gpt-4o-audio-preview-2024-10-01","object":"model","created":1727389042,"owned_by":"system"},{"id":"gpt-4o-realtime-preview","object":"model","created":1727659998,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-10-01","object":"model","created":1727131766,"owned_by":"system"},{"id":"dall-e-2","object":"model","created":1698798177,"owned_by":"system"},{"id":"gpt-4o-2024-08-06","object":"model","created":1722814719,"owned_by":"system"},{"id":"gpt-4-turbo","object":"model","created":1712361441,"owned_by":"system"},{"id":"gpt-4-1106-preview","object":"model","created":1698957206,"owned_by":"system"},{"id":"gpt-4o","object":"model","created":1715367049,"owned_by":"system"},{"id":"gpt-3.5-turbo","object":"model","created":1677610602,"owned_by":"openai"},{"id":"gpt-3.5-turbo-0125","object":"model","created":1706048358,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct","object":"model","created":1692901427,"owned_by":"system"},{"id":"gpt-4-1106-vision-preview","object":"model","created":1711473033,"owned_by":"system"},{"id":"babbage-002","object":"model","created":1692634615,"owned_by":"system"},{"id":"whisper-1","object":"model","created":1677532384,"owned_by":"openai-internal"},{"id":"dall-e-3","object":"model","created":1698785189,"owned_by":"system"},{"id":"text-embedding-3-small","object":"model","created":1705948997,"owned_by":"system"},{"id":"gpt-3.5-turbo-16k","object":"model","created":1683758102,"owned_by":"openai-internal"},{"id":"gpt-4-0125-preview","object":"model","created":1706037612,"owned_by":"system"},{"id":"gpt-4-turbo-preview","object":"model","created":1706037777,"owned_by":"system"},{"id":"chatgpt-4o-latest","object":"model","created":1723515131,"owned_by":"system"},{"id":"omni-moderation-latest","object":"model","created":1731689265,"owned_by":"system"},{"id":"gpt-4o-2024-05-13","object":"model","created":1715368132,"owned_by":"system"},{"id":"o1-preview-2024-09-12","object":"model","created":1725648865,"owned_by":"system"},{"id":"omni-moderation-2024-09-26","object":"model","created":1732734466,"owned_by":"system"},{"id":"tts-1-hd-1106","object":"model","created":1699053533,"owned_by":"system"},{"id":"o1-preview","object":"model","created":1725648897,"owned_by":"system"},{"id":"gpt-4","object":"model","created":1687882411,"owned_by":"openai"},{"id":"gpt-4-0613","object":"model","created":1686588896,"owned_by":"openai"},{"id":"tts-1-hd","object":"model","created":1699046015,"owned_by":"system"},{"id":"gpt-4-vision-preview","object":"model","created":1698894917,"owned_by":"system"},{"id":"text-embedding-ada-002","object":"model","created":1671217299,"owned_by":"openai-internal"},{"id":"g
pt-3.5-turbo-1106","object":"model","created":1698959748,"owned_by":"system"},{"id":"gpt-4o-audio-preview","object":"model","created":1727460443,"owned_by":"system"},{"id":"tts-1","object":"model","created":1681940951,"owned_by":"openai-internal"},{"id":"tts-1-1106","object":"model","created":1699053241,"owned_by":"system"},{"id":"gpt-3.5-turbo-instruct-0914","object":"model","created":1694122472,"owned_by":"system"},{"id":"davinci-002","object":"model","created":1692634301,"owned_by":"system"},{"id":"text-embedding-3-large","object":"model","created":1705953180,"owned_by":"system"},{"id":"gpt-4o-realtime-preview-2024-12-17","object":"model","created":1733945430,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview","object":"model","created":1734387380,"owned_by":"system"},{"id":"gpt-4o-mini-realtime-preview-2024-12-17","object":"model","created":1734112601,"owned_by":"system"},{"id":"o1-mini","object":"model","created":1725649008,"owned_by":"system"},{"id":"gpt-4o-2024-11-20","object":"model","created":1731975040,"owned_by":"system"},{"id":"o1-mini-2024-09-12","object":"model","created":1725648979,"owned_by":"system"},{"id":"gpt-4o-mini-2024-07-18","object":"model","created":1721172717,"owned_by":"system"},{"id":"gpt-4o-mini","object":"model","created":1721172741,"owned_by":"system"},{"id":"gpt-4o-audio-preview-2024-12-17","object":"model","created":1734034239,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview","object":"model","created":1734387424,"owned_by":"system"},{"id":"gpt-4o-mini-audio-preview-2024-12-17","object":"model","created":1734115920,"owned_by":"system"},{"id":"gpt-4-turbo-2024-04-09","object":"model","created":1712601677,"owned_by":""}] \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openrouter-models-free.ts b/packages/kbot/src/models/cache/openrouter-models-free.ts new file mode 100644 index 00000000..4efe7726 --- /dev/null +++ b/packages/kbot/src/models/cache/openrouter-models-free.ts @@ -0,0 +1,59 @@ +export enum E_OPENROUTER_MODEL_FREE { + MODEL_FREE_DEEPSEEK_DEEPSEEK_V3_BASE_FREE = "deepseek/deepseek-v3-base:free", + MODEL_FREE_ALLENAI_MOLMO_7B_D_FREE = "allenai/molmo-7b-d:free", + MODEL_FREE_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE = "bytedance-research/ui-tars-72b:free", + MODEL_FREE_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE = "qwen/qwen2.5-vl-3b-instruct:free", + MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE = "google/gemini-2.5-pro-exp-03-25:free", + MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", + MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free", + MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", + MODEL_FREE_OPEN_R1_OLYMPICCODER_7B_FREE = "open-r1/olympiccoder-7b:free", + MODEL_FREE_OPEN_R1_OLYMPICCODER_32B_FREE = "open-r1/olympiccoder-32b:free", + MODEL_FREE_GOOGLE_GEMMA_3_1B_IT_FREE = "google/gemma-3-1b-it:free", + MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", + MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", + MODEL_FREE_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free", + MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE = "deepseek/deepseek-r1-zero:free", + MODEL_FREE_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free", + MODEL_FREE_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE = "moonshotai/moonlight-16b-a3b-instruct:free", + 
MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free", + MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", + MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE = "google/gemini-2.0-pro-exp-02-05:free", + MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free", + MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE = "deepseek/deepseek-r1-distill-qwen-32b:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE = "google/gemini-2.0-flash-thinking-exp:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free", + MODEL_FREE_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE = "sophosympatheia/rogue-rose-103b-v0.2:free", + MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_FREE = "deepseek/deepseek-chat:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE = "google/gemini-2.0-flash-thinking-exp-1219:free", + MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", + MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", + MODEL_FREE_QWEN_QWQ_32B_PREVIEW_FREE = "qwen/qwq-32b-preview:free", + MODEL_FREE_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE = "google/learnlm-1.5-pro-experimental:free", + MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", + MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE = "nvidia/llama-3.1-nemotron-70b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", + MODEL_FREE_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", + MODEL_FREE_GOOGLE_GEMINI_FLASH_1_5_8B_EXP = "google/gemini-flash-1.5-8b-exp", + MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", + MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", + MODEL_FREE_QWEN_QWEN_2_7B_INSTRUCT_FREE = "qwen/qwen-2-7b-instruct:free", + MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", + MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", + MODEL_FREE_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE = "microsoft/phi-3-mini-128k-instruct:free", + MODEL_FREE_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE = "microsoft/phi-3-medium-128k-instruct:free", + MODEL_FREE_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE = "meta-llama/llama-3-8b-instruct:free", + MODEL_FREE_OPENCHAT_OPENCHAT_7B_FREE = "openchat/openchat-7b:free", + MODEL_FREE_UNDI95_TOPPY_M_7B_FREE = "undi95/toppy-m-7b:free", + MODEL_FREE_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE = "huggingfaceh4/zephyr-7b-beta:free", + MODEL_FREE_GRYPHE_MYTHOMAX_L2_13B_FREE = "gryphe/mythomax-l2-13b:free" +} \ No newline at end of file diff --git 
a/packages/kbot/src/models/cache/openrouter-models-tools.ts b/packages/kbot/src/models/cache/openrouter-models-tools.ts new file mode 100644 index 00000000..c97786e4 --- /dev/null +++ b/packages/kbot/src/models/cache/openrouter-models-tools.ts @@ -0,0 +1,3 @@ +export enum E_OPENROUTER_MODEL { + +} \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openrouter-models.ts b/packages/kbot/src/models/cache/openrouter-models.ts new file mode 100644 index 00000000..93354059 --- /dev/null +++ b/packages/kbot/src/models/cache/openrouter-models.ts @@ -0,0 +1,292 @@ +export enum E_OPENROUTER_MODEL { + MODEL_MISTRAL_MINISTRAL_8B = "mistral/ministral-8b", + MODEL_DEEPSEEK_DEEPSEEK_V3_BASE_FREE = "deepseek/deepseek-v3-base:free", + MODEL_SCB10X_LLAMA3_1_TYPHOON2_8B_INSTRUCT = "scb10x/llama3.1-typhoon2-8b-instruct", + MODEL_SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT = "scb10x/llama3.1-typhoon2-70b-instruct", + MODEL_ALLENAI_MOLMO_7B_D_FREE = "allenai/molmo-7b-d:free", + MODEL_BYTEDANCE_RESEARCH_UI_TARS_72B_FREE = "bytedance-research/ui-tars-72b:free", + MODEL_QWEN_QWEN2_5_VL_3B_INSTRUCT_FREE = "qwen/qwen2.5-vl-3b-instruct:free", + MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25_FREE = "google/gemini-2.5-pro-exp-03-25:free", + MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = "deepseek/deepseek-chat-v3-0324", + MODEL_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free", + MODEL_OPENAI_O1_PRO = "openai/o1-pro", + MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", + MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct", + MODEL_OPEN_R1_OLYMPICCODER_7B_FREE = "open-r1/olympiccoder-7b:free", + MODEL_OPEN_R1_OLYMPICCODER_32B_FREE = "open-r1/olympiccoder-32b:free", + MODEL_STEELSKULL_L3_3_ELECTRA_R1_70B = "steelskull/l3.3-electra-r1-70b", + MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct", + MODEL_GOOGLE_GEMMA_3_1B_IT_FREE = "google/gemma-3-1b-it:free", + MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", + MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it", + MODEL_AI21_JAMBA_1_6_LARGE = "ai21/jamba-1.6-large", + MODEL_AI21_JAMBA_1_6_MINI = "ai21/jamba-1.6-mini", + MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", + MODEL_GOOGLE_GEMMA_3_12B_IT = "google/gemma-3-12b-it", + MODEL_COHERE_COMMAND_A = "cohere/command-a", + MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW = "openai/gpt-4o-mini-search-preview", + MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview", + MODEL_TOKYOTECH_LLM_LLAMA_3_1_SWALLOW_70B_INSTRUCT_V0_3 = "tokyotech-llm/llama-3.1-swallow-70b-instruct-v0.3", + MODEL_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free", + MODEL_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free", + MODEL_GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it", + MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1", + MODEL_LATITUDEGAMES_WAYFARER_LARGE_70B_LLAMA_3_3 = "latitudegames/wayfarer-large-70b-llama-3.3", + MODEL_THEDRUMMER_SKYFALL_36B_V2 = "thedrummer/skyfall-36b-v2", + MODEL_MICROSOFT_PHI_4_MULTIMODAL_INSTRUCT = "microsoft/phi-4-multimodal-instruct", + MODEL_PERPLEXITY_SONAR_REASONING_PRO = "perplexity/sonar-reasoning-pro", + MODEL_PERPLEXITY_SONAR_PRO = "perplexity/sonar-pro", + MODEL_PERPLEXITY_SONAR_DEEP_RESEARCH = "perplexity/sonar-deep-research", + 
MODEL_DEEPSEEK_DEEPSEEK_R1_ZERO_FREE = "deepseek/deepseek-r1-zero:free", + MODEL_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free", + MODEL_QWEN_QWQ_32B = "qwen/qwq-32b", + MODEL_QWEN_QWEN2_5_32B_INSTRUCT = "qwen/qwen2.5-32b-instruct", + MODEL_MOONSHOTAI_MOONLIGHT_16B_A3B_INSTRUCT_FREE = "moonshotai/moonlight-16b-a3b-instruct:free", + MODEL_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free", + MODEL_OPENAI_GPT_4_5_PREVIEW = "openai/gpt-4.5-preview", + MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking", + MODEL_PERPLEXITY_R1_1776 = "perplexity/r1-1776", + MODEL_MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free", + MODEL_META_LLAMA_LLAMA_GUARD_3_8B = "meta-llama/llama-guard-3-8b", + MODEL_OPENAI_O3_MINI_HIGH = "openai/o3-mini-high", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B = "deepseek/deepseek-r1-distill-llama-8b", + MODEL_GOOGLE_GEMINI_2_0_FLASH_001 = "google/gemini-2.0-flash-001", + MODEL_GOOGLE_GEMINI_2_0_PRO_EXP_02_05_FREE = "google/gemini-2.0-pro-exp-02-05:free", + MODEL_QWEN_QWEN_VL_PLUS = "qwen/qwen-vl-plus", + MODEL_AION_LABS_AION_1_0 = "aion-labs/aion-1.0", + MODEL_AION_LABS_AION_1_0_MINI = "aion-labs/aion-1.0-mini", + MODEL_AION_LABS_AION_RP_LLAMA_3_1_8B = "aion-labs/aion-rp-llama-3.1-8b", + MODEL_QWEN_QWEN_VL_MAX = "qwen/qwen-vl-max", + MODEL_QWEN_QWEN_TURBO = "qwen/qwen-turbo", + MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free", + MODEL_QWEN_QWEN2_5_VL_72B_INSTRUCT = "qwen/qwen2.5-vl-72b-instruct", + MODEL_QWEN_QWEN_PLUS = "qwen/qwen-plus", + MODEL_QWEN_QWEN_MAX = "qwen/qwen-max", + MODEL_OPENAI_O3_MINI = "openai/o3-mini", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B = "deepseek/deepseek-r1-distill-qwen-1.5b", + MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free", + MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501 = "mistralai/mistral-small-24b-instruct-2501", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B_FREE = "deepseek/deepseek-r1-distill-qwen-32b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B = "deepseek/deepseek-r1-distill-qwen-32b", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B = "deepseek/deepseek-r1-distill-qwen-14b", + MODEL_PERPLEXITY_SONAR_REASONING = "perplexity/sonar-reasoning", + MODEL_PERPLEXITY_SONAR = "perplexity/sonar", + MODEL_LIQUID_LFM_7B = "liquid/lfm-7b", + MODEL_LIQUID_LFM_3B = "liquid/lfm-3b", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B = "deepseek/deepseek-r1-distill-llama-70b", + MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_FREE = "google/gemini-2.0-flash-thinking-exp:free", + MODEL_DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free", + MODEL_DEEPSEEK_DEEPSEEK_R1 = "deepseek/deepseek-r1", + MODEL_SOPHOSYMPATHEIA_ROGUE_ROSE_103B_V0_2_FREE = "sophosympatheia/rogue-rose-103b-v0.2:free", + MODEL_MINIMAX_MINIMAX_01 = "minimax/minimax-01", + 
MODEL_MISTRALAI_CODESTRAL_2501 = "mistralai/codestral-2501", + MODEL_MICROSOFT_PHI_4 = "microsoft/phi-4", + MODEL_SAO10K_L3_1_70B_HANAMI_X1 = "sao10k/l3.1-70b-hanami-x1", + MODEL_DEEPSEEK_DEEPSEEK_CHAT_FREE = "deepseek/deepseek-chat:free", + MODEL_DEEPSEEK_DEEPSEEK_CHAT = "deepseek/deepseek-chat", + MODEL_GOOGLE_GEMINI_2_0_FLASH_THINKING_EXP_1219_FREE = "google/gemini-2.0-flash-thinking-exp-1219:free", + MODEL_SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b", + MODEL_OPENAI_O1 = "openai/o1", + MODEL_EVA_UNIT_01_EVA_LLAMA_3_33_70B = "eva-unit-01/eva-llama-3.33-70b", + MODEL_X_AI_GROK_2_VISION_1212 = "x-ai/grok-2-vision-1212", + MODEL_X_AI_GROK_2_1212 = "x-ai/grok-2-1212", + MODEL_COHERE_COMMAND_R7B_12_2024 = "cohere/command-r7b-12-2024", + MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", + MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT = "meta-llama/llama-3.3-70b-instruct", + MODEL_AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1", + MODEL_AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1", + MODEL_AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1", + MODEL_QWEN_QWQ_32B_PREVIEW_FREE = "qwen/qwq-32b-preview:free", + MODEL_QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview", + MODEL_GOOGLE_LEARNLM_1_5_PRO_EXPERIMENTAL_FREE = "google/learnlm-1.5-pro-experimental:free", + MODEL_EVA_UNIT_01_EVA_QWEN_2_5_72B = "eva-unit-01/eva-qwen-2.5-72b", + MODEL_OPENAI_GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20", + MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411", + MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407", + MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411", + MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta", + MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b", + MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", + MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct", + MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", + MODEL_EVA_UNIT_01_EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b", + MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", + MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b", + MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", + MODEL_X_AI_GROK_BETA = "x-ai/grok-beta", + MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", + MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", + MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", + MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT_FREE = "nvidia/llama-3.1-nemotron-70b-instruct:free", + MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", + MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", + MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", + MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", + MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", + 
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", + MODEL_LIQUID_LFM_40B = "liquid/lfm-40b", + MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", + MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", + MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", + MODEL_QWEN_QWEN_2_5_VL_72B_INSTRUCT = "qwen/qwen-2.5-vl-72b-instruct", + MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", + MODEL_OPENAI_O1_PREVIEW = "openai/o1-preview", + MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12", + MODEL_OPENAI_O1_MINI = "openai/o1-mini", + MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", + MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", + MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", + MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", + MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", + MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", + MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", + MODEL_GOOGLE_GEMINI_FLASH_1_5_8B_EXP = "google/gemini-flash-1.5-8b-exp", + MODEL_AI21_JAMBA_1_5_MINI = "ai21/jamba-1-5-mini", + MODEL_AI21_JAMBA_1_5_LARGE = "ai21/jamba-1-5-large", + MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", + MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b", + MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b", + MODEL_OPENAI_CHATGPT_4O_LATEST = "openai/chatgpt-4o-latest", + MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", + MODEL_AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b", + MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", + MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b", + MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE = "perplexity/llama-3.1-sonar-small-128k-online", + MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = "perplexity/llama-3.1-sonar-large-128k-online", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", + MODEL_MISTRALAI_CODESTRAL_MAMBA = "mistralai/codestral-mamba", + MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", + MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", + MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", + MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", + MODEL_QWEN_QWEN_2_7B_INSTRUCT_FREE = "qwen/qwen-2-7b-instruct:free", + MODEL_QWEN_QWEN_2_7B_INSTRUCT = "qwen/qwen-2-7b-instruct", + MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it", + 
MODEL_ALPINDALE_MAGNUM_72B = "alpindale/magnum-72b", + MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", + MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it", + MODEL_01_AI_YI_LARGE = "01-ai/yi-large", + MODEL_AI21_JAMBA_INSTRUCT = "ai21/jamba-instruct", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA = "anthropic/claude-3.5-sonnet-20240620:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620", + MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", + MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", + MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT_FREE = "microsoft/phi-3-mini-128k-instruct:free", + MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", + MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT_FREE = "microsoft/phi-3-medium-128k-instruct:free", + MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", + MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5", + MODEL_OPENAI_GPT_4O = "openai/gpt-4o", + MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended", + MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", + MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B_EXTENDED = "neversleep/llama-3-lumimaid-8b:extended", + MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B = "neversleep/llama-3-lumimaid-8b", + MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2", + MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT_FREE = "meta-llama/llama-3-8b-instruct:free", + MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct", + MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct", + MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b", + MODEL_MICROSOFT_WIZARDLM_2_7B = "microsoft/wizardlm-2-7b", + MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5", + MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", + MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", + MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024", + MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", + MODEL_COHERE_COMMAND = "cohere/command", + MODEL_COHERE_COMMAND_R = "cohere/command-r", + MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta", + MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", + MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta", + MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", + MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA = "anthropic/claude-3-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet", + MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", + MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", + MODEL_GOOGLE_GEMMA_7B_IT = "google/gemma-7b-it", + MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", + MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", + 
MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", + MODEL_MISTRALAI_MISTRAL_MEDIUM = "mistralai/mistral-medium", + MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", + MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2", + MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X7B = "cognitivecomputations/dolphin-mixtral-8x7b", + MODEL_GOOGLE_GEMINI_PRO_VISION = "google/gemini-pro-vision", + MODEL_GOOGLE_GEMINI_PRO = "google/gemini-pro", + MODEL_MISTRALAI_MIXTRAL_8X7B = "mistralai/mixtral-8x7b", + MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", + MODEL_OPENCHAT_OPENCHAT_7B_FREE = "openchat/openchat-7b:free", + MODEL_OPENCHAT_OPENCHAT_7B = "openchat/openchat-7b", + MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", + MODEL_ANTHROPIC_CLAUDE_2_1_BETA = "anthropic/claude-2.1:beta", + MODEL_ANTHROPIC_CLAUDE_2_1 = "anthropic/claude-2.1", + MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta", + MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2", + MODEL_TEKNIUM_OPENHERMES_2_5_MISTRAL_7B = "teknium/openhermes-2.5-mistral-7b", + MODEL_UNDI95_TOPPY_M_7B_FREE = "undi95/toppy-m-7b:free", + MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b", + MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", + MODEL_OPENROUTER_AUTO = "openrouter/auto", + MODEL_OPENAI_GPT_3_5_TURBO_1106 = "openai/gpt-3.5-turbo-1106", + MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview", + MODEL_GOOGLE_PALM_2_CHAT_BISON_32K = "google/palm-2-chat-bison-32k", + MODEL_GOOGLE_PALM_2_CODECHAT_BISON_32K = "google/palm-2-codechat-bison-32k", + MODEL_JONDURBIN_AIROBOROS_L2_70B = "jondurbin/airoboros-l2-70b", + MODEL_XWIN_LM_XWIN_LM_70B = "xwin-lm/xwin-lm-70b", + MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", + MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b", + MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k", + MODEL_OPENAI_GPT_4_32K = "openai/gpt-4-32k", + MODEL_OPENAI_GPT_4_32K_0314 = "openai/gpt-4-32k-0314", + MODEL_NOUSRESEARCH_NOUS_HERMES_LLAMA2_13B = "nousresearch/nous-hermes-llama2-13b", + MODEL_MANCER_WEAVER = "mancer/weaver", + MODEL_HUGGINGFACEH4_ZEPHYR_7B_BETA_FREE = "huggingfaceh4/zephyr-7b-beta:free", + MODEL_ANTHROPIC_CLAUDE_2_0_BETA = "anthropic/claude-2.0:beta", + MODEL_ANTHROPIC_CLAUDE_2_0 = "anthropic/claude-2.0", + MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b", + MODEL_GOOGLE_PALM_2_CHAT_BISON = "google/palm-2-chat-bison", + MODEL_GOOGLE_PALM_2_CODECHAT_BISON = "google/palm-2-codechat-bison", + MODEL_GRYPHE_MYTHOMAX_L2_13B_FREE = "gryphe/mythomax-l2-13b:free", + MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b", + MODEL_META_LLAMA_LLAMA_2_13B_CHAT = "meta-llama/llama-2-13b-chat", + MODEL_META_LLAMA_LLAMA_2_70B_CHAT = "meta-llama/llama-2-70b-chat", + MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo", + MODEL_OPENAI_GPT_3_5_TURBO_0125 = "openai/gpt-3.5-turbo-0125", + MODEL_OPENAI_GPT_4 = "openai/gpt-4", + MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314" +} \ No newline at end of file diff --git a/packages/kbot/src/models/index.ts b/packages/kbot/src/models/index.ts index 7e983ee8..a14ccaf9 100644 --- a/packages/kbot/src/models/index.ts +++ b/packages/kbot/src/models/index.ts @@ -51,8 +51,6 @@ export const models_dist = () => { return models } - - export const models = () => { const models: string[] 
= [] const openRouterPath = path.resolve(OPENROUTER_CACHE_PATH) diff --git a/packages/kbot/src/models/openrouter.ts b/packages/kbot/src/models/openrouter.ts index 882f3aed..f81c7cf0 100644 --- a/packages/kbot/src/models/openrouter.ts +++ b/packages/kbot/src/models/openrouter.ts @@ -18,6 +18,15 @@ interface ModelContext { supported_parameters: string[]; } +interface TopProvider { + context_length: number; + max_completion_tokens: number | null; + is_moderated: boolean; + supports_functions?: boolean; + supports_function_calling?: boolean; + supports_tools?: boolean; +} + export interface OpenRouterModel { id: string; name: string; @@ -25,14 +34,18 @@ export interface OpenRouterModel { pricing: ModelPricing; context: ModelContext; created: number; + top_provider?: TopProvider; } + interface OpenRouterResponse { data: OpenRouterModel[]; } + export interface CachedModels { timestamp: number; models: OpenRouterModel[]; } + import { fileURLToPath } from 'node:url'; //export const CACHE_PATH = path.resolve(path.join(path.parse(__filename).dir), 'data', 'openrouter_models.json') const __dirname = fileURLToPath(new URL('.', import.meta.url)); diff --git a/packages/kbot/src/zod_schema.ts b/packages/kbot/src/zod_schema.ts index 92ee4e6a..238fd6fb 100644 --- a/packages/kbot/src/zod_schema.ts +++ b/packages/kbot/src/zod_schema.ts @@ -6,6 +6,7 @@ import { generate_interfaces, ZodMetaMap, resolve, write } from '@polymech/commo import { sync as exists } from '@polymech/fs/exists' import { sync as writeFS } from '@polymech/fs/write' import { isArray, isFunction, isString } from '@polymech/core/primitives' +import { zodResponseFormat } from "openai/helpers/zod" import { API_PREFIX, LOGGING_DIRECTORY, PREFERENCES_FILE_NAME } from './constants.js' @@ -243,6 +244,14 @@ export const OptionsSchema = (opts?: any): any => { .optional() .default(false) .describe('Dry run - only write out parameters without making API calls') + ) + .add( + 'format', + z.any() + .optional() + .default(null) + .describe('Zod schema for structured outputs') + .transform((val) => val ? 
zodResponseFormat(val, "format") : null) ); return schemaMap.root() .passthrough() diff --git a/packages/kbot/src/zod_types.ts b/packages/kbot/src/zod_types.ts index 909dba60..323954c5 100644 --- a/packages/kbot/src/zod_types.ts +++ b/packages/kbot/src/zod_types.ts @@ -34,6 +34,7 @@ export interface IKBotOptions { aion-labs/aion-1.0-mini | paid aion-labs/aion-rp-llama-3.1-8b | paid jondurbin/airoboros-l2-70b | paid + allenai/molmo-7b-d:free | free allenai/olmo-2-0325-32b-instruct | paid amazon/nova-lite-v1 | paid amazon/nova-micro-v1 | paid @@ -62,6 +63,7 @@ export interface IKBotOptions { anthropic/claude-2.1 | paid anthropic/claude-2.1:beta | paid openrouter/auto | paid + bytedance-research/ui-tars-72b:free | free cohere/command | paid cohere/command-a | paid cohere/command-r | paid @@ -74,6 +76,9 @@ export interface IKBotOptions { deepseek/deepseek-r1-zero:free | free deepseek/deepseek-chat | paid deepseek/deepseek-chat:free | free + deepseek/deepseek-chat-v3-0324 | paid + deepseek/deepseek-chat-v3-0324:free | free + deepseek/deepseek-v3-base:free | free deepseek/deepseek-r1 | paid deepseek/deepseek-r1:free | free deepseek/deepseek-r1-distill-llama-70b | paid @@ -96,24 +101,25 @@ export interface IKBotOptions { google/gemini-2.0-flash-lite-001 | paid google/gemini-2.0-flash-thinking-exp-1219:free | free google/gemini-2.0-flash-thinking-exp:free | free - google/gemini-exp-1206:free | free google/gemini-flash-1.5 | paid google/gemini-flash-1.5-8b | paid google/gemini-flash-1.5-8b-exp | paid google/gemini-2.0-flash-001 | paid google/gemini-2.0-flash-exp:free | free - google/gemini-2.0-flash-lite-preview-02-05:free | free google/gemini-pro | paid google/gemini-pro-1.5 | paid google/gemini-2.0-pro-exp-02-05:free | free + google/gemini-2.5-pro-exp-03-25:free | free google/gemini-pro-vision | paid google/gemma-2-27b-it | paid google/gemma-2-9b-it | paid google/gemma-2-9b-it:free | free + google/gemma-3-12b-it | paid google/gemma-3-12b-it:free | free google/gemma-3-1b-it:free | free google/gemma-3-27b-it | paid google/gemma-3-27b-it:free | free + google/gemma-3-4b-it | paid google/gemma-3-4b-it:free | free google/gemma-7b-it | paid google/learnlm-1.5-pro-experimental:free | free @@ -129,7 +135,6 @@ export interface IKBotOptions { liquid/lfm-3b | paid liquid/lfm-40b | paid liquid/lfm-7b | paid - allenai/llama-3.1-tulu-3-405b | paid meta-llama/llama-guard-3-8b | paid alpindale/magnum-72b | paid anthracite-org/magnum-v2-72b | paid @@ -174,6 +179,7 @@ export interface IKBotOptions { mistralai/codestral-2501 | paid mistralai/codestral-mamba | paid mistralai/ministral-3b | paid + mistral/ministral-8b | paid mistralai/ministral-8b | paid mistralai/mistral-7b-instruct | paid mistralai/mistral-7b-instruct:free | free @@ -185,6 +191,7 @@ export interface IKBotOptions { mistralai/mistral-small-24b-instruct-2501 | paid mistralai/mistral-small-24b-instruct-2501:free | free mistralai/mistral-small-3.1-24b-instruct | paid + mistralai/mistral-small-3.1-24b-instruct:free | free mistralai/mixtral-8x22b-instruct | paid mistralai/mixtral-8x7b | paid mistralai/mixtral-8x7b-instruct | paid @@ -239,14 +246,13 @@ export interface IKBotOptions { openai/o1-mini-2024-09-12 | paid openai/o1-preview | paid openai/o1-preview-2024-09-12 | paid + openai/o1-pro | paid openai/o3-mini | paid openai/o3-mini-high | paid openchat/openchat-7b | paid openchat/openchat-7b:free | free teknium/openhermes-2.5-mistral-7b | paid - perplexity/llama-3.1-sonar-large-128k-chat | paid perplexity/llama-3.1-sonar-large-128k-online | paid - 
perplexity/llama-3.1-sonar-small-128k-chat | paid perplexity/llama-3.1-sonar-small-128k-online | paid perplexity/r1-1776 | paid perplexity/sonar | paid @@ -264,10 +270,13 @@ export interface IKBotOptions { qwen/qwen-plus | paid qwen/qwen-turbo | paid qwen/qwen2.5-32b-instruct | paid + qwen/qwen2.5-vl-32b-instruct:free | free + qwen/qwen2.5-vl-3b-instruct:free | free qwen/qwen2.5-vl-72b-instruct | paid qwen/qwen2.5-vl-72b-instruct:free | free qwen/qwen-2.5-vl-72b-instruct | paid qwen/qwen-2.5-vl-7b-instruct | paid + qwen/qwen-2.5-vl-7b-instruct:free | free qwen/qwq-32b | paid qwen/qwq-32b:free | free qwen/qwq-32b-preview | paid @@ -277,6 +286,7 @@ export interface IKBotOptions { qwen/qwen-2.5-7b-instruct | paid qwen/qwen-2.5-coder-32b-instruct | paid qwen/qwen-2.5-coder-32b-instruct:free | free + featherless/qwerky-72b:free | free rekaai/reka-flash-3:free | free undi95/remm-slerp-l2-13b | paid thedrummer/rocinante-12b | paid @@ -293,6 +303,8 @@ export interface IKBotOptions { thedrummer/skyfall-36b-v2 | paid undi95/toppy-m-7b | paid undi95/toppy-m-7b:free | free + scb10x/llama3.1-typhoon2-70b-instruct | paid + scb10x/llama3.1-typhoon2-8b-instruct | paid thedrummer/unslopnemo-12b | paid microsoft/wizardlm-2-7b | paid microsoft/wizardlm-2-8x22b | paid @@ -339,17 +351,22 @@ export interface IKBotOptions { gpt-4o-mini-realtime-preview-2024-12-17 gpt-4o-mini-search-preview gpt-4o-mini-search-preview-2025-03-11 + gpt-4o-mini-transcribe + gpt-4o-mini-tts gpt-4o-realtime-preview gpt-4o-realtime-preview-2024-10-01 gpt-4o-realtime-preview-2024-12-17 gpt-4o-search-preview gpt-4o-search-preview-2025-03-11 + gpt-4o-transcribe o1 o1-2024-12-17 o1-mini o1-mini-2024-09-12 o1-preview o1-preview-2024-09-12 + o1-pro + o1-pro-2025-03-19 o3-mini o3-mini-2025-01-31 omni-moderation-2024-09-26 @@ -410,4 +427,6 @@ export interface IKBotOptions { filters?: (string | ("JSON" | "JSONUnescape" | "JSONPretty" | "AlphaSort" | "code" | "JSONParse" | "trim")[] | string[] | ((...args_0: unknown[]) => unknown)[]); /** Dry run - only write out parameters without making API calls */ dry?: (boolean | string); + /** Zod schema for structured outputs */ + format?: any; } \ No newline at end of file
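
Usage note (not part of the patch): the new 'format' option added in zod_schema.ts turns a user-supplied Zod schema into an OpenAI structured-output response format via zodResponseFormat, and the generated cache enums give type-safe model ids. The sketch below shows how the two pieces might be combined in consuming code; the relative import path, the Answer schema, and the surrounding usage are illustrative assumptions, not code from this changeset.

import { z } from "zod";
import { zodResponseFormat } from "openai/helpers/zod";
// Assumed relative path from src/ to the generated cache file.
import { E_OPENROUTER_MODEL_FREE } from "./models/cache/openrouter-models-free.js";

// A Zod schema describing the structured output we want back (illustrative).
const Answer = z.object({
  summary: z.string(),
  confidence: z.number().min(0).max(1),
});

// This mirrors what the OptionsSchema transform does with the 'format' option:
// the Zod schema becomes a { type: "json_schema", json_schema: ... } payload
// suitable for OpenAI structured outputs.
const responseFormat = zodResponseFormat(Answer, "format");

// Type-safe model selection via one of the generated enums.
const model = E_OPENROUTER_MODEL_FREE.MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE;

console.log(model, responseFormat.type); // "deepseek/deepseek-r1:free" "json_schema"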