transcribe 1/2

This commit is contained in:
babayaga 2025-09-13 12:11:36 +02:00
parent 31885673ec
commit c32511c3ab
32 changed files with 242891 additions and 8168 deletions

View File

@ -0,0 +1,3 @@
import { IKBotTask } from '@polymech/ai-tools';
export declare const TranscribeOptionsSchema: () => any;
export declare const transcribeCommand: (opts: IKBotTask) => Promise<void>;

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
import { IKBotTask } from '@polymech/ai-tools';
export declare const transcribe: (options: IKBotTask) => Promise<any>;

View File

@ -0,0 +1,55 @@
import * as fs from 'fs';
import { toFile } from "openai";
import { sync as exists } from '@polymech/fs/exists';
import { sync as write } from '@polymech/fs/write';
import { createClient } from '../client.js';
/**
 * Read the file at `path` fully into memory.
 *
 * @param path - Filesystem path of the file to read.
 * @returns The file contents as a Buffer, or null when the read fails
 *          (the error is logged to the console in that case).
 */
const createBuffer = (path) => {
    try {
        return fs.readFileSync(path);
    }
    catch (error) {
        // Swallow the failure deliberately: callers treat null as "could not read".
        console.error('Error creating buffer:', error);
        return null;
    }
};
/**
 * Transcribe an audio file via the OpenAI transcription API (whisper-1).
 *
 * @param options - Task options. Uses:
 *   - options.include[0]: path of the source audio file (required)
 *   - options.response_format: transcription response format (defaults to "verbose_json")
 *   - options.dst: optional output path; when absent, text is written to stdout
 *   - options.logger: error reporting
 * @returns The raw transcription response, or undefined on any handled failure.
 */
export const transcribe = async (options) => {
    const client = createClient(options);
    if (!client) {
        options.logger.error('Failed to create client');
        return;
    }
    if (!options.include || options.include.length === 0) {
        options.logger.error('No source file provided via --include');
        return;
    }
    const sourceFile = options.include[0];
    if (!exists(sourceFile)) {
        options.logger.error('Source file does not exist', sourceFile);
        return;
    }
    // Fix: createBuffer returns null when the read fails; previously that null
    // was passed straight into toFile(), which throws instead of following the
    // log-and-return error convention used everywhere else in this function.
    const buffer = createBuffer(sourceFile);
    if (!buffer) {
        options.logger.error('Error reading source file', sourceFile);
        return;
    }
    const file = await toFile(buffer, 'audio.mp3', { type: 'audio/mpeg' });
    if (!file) {
        options.logger.error('Error converting source to file');
        return;
    }
    const completion = await client.audio.transcriptions.create({
        model: 'whisper-1',
        file: file,
        response_format: options.response_format || "verbose_json",
    });
    if (!completion) {
        options.logger.error('OpenAI response is empty');
        return;
    }
    // "verbose_json" responses carry the transcript in .text; fall back to ''.
    const text_content = completion.text || '';
    if (options.dst) {
        write(options.dst, text_content);
    }
    else {
        process.stdout.write(text_content);
    }
    // options.logger.debug('OpenAI Transcribe response:', completion)
    return completion;
};
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHJhbnNjcmliZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9saWIvdHJhbnNjcmliZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssRUFBRSxNQUFNLElBQUksQ0FBQTtBQUN4QixPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0sUUFBUSxDQUFBO0FBQy9CLE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDcEQsT0FBTyxFQUFFLElBQUksSUFBSSxLQUFLLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUVsRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sY0FBYyxDQUFBO0FBRTNDLE1BQU0sWUFBWSxHQUFHLENBQUMsSUFBWSxFQUFpQixFQUFFO0lBQ2pELElBQUksQ0FBQztRQUNELE1BQU0sTUFBTSxHQUFHLEVBQUUsQ0FBQyxZQUFZLENBQUMsSUFBSSxDQUFDLENBQUE7UUFDcEMsT0FBTyxNQUFNLENBQUM7SUFDbEIsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDYixPQUFPLENBQUMsS0FBSyxDQUFDLHdCQUF3QixFQUFFLEtBQUssQ0FBQyxDQUFDO1FBQy9DLE9BQU8sSUFBSSxDQUFDO0lBQ2hCLENBQUM7QUFDTCxDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsTUFBTSxVQUFVLEdBQUcsS0FBSyxFQUFFLE9BQWtCLEVBQUUsRUFBRTtJQUNuRCxNQUFNLE1BQU0sR0FBRyxZQUFZLENBQUMsT0FBTyxDQUFDLENBQUE7SUFDcEMsSUFBSSxDQUFDLE1BQU0sRUFBRSxDQUFDO1FBQ1YsT0FBTyxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMseUJBQXlCLENBQUMsQ0FBQTtRQUMvQyxPQUFNO0lBQ1YsQ0FBQztJQUVELElBQUksQ0FBQyxPQUFPLENBQUMsT0FBTyxJQUFJLE9BQU8sQ0FBQyxPQUFPLENBQUMsTUFBTSxLQUFLLENBQUMsRUFBRSxDQUFDO1FBQ25ELE9BQU8sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLHVDQUF1QyxDQUFDLENBQUE7UUFDN0QsT0FBTztJQUNYLENBQUM7SUFFRCxNQUFNLFVBQVUsR0FBRyxPQUFPLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDO0lBRXRDLElBQUksQ0FBQyxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUN0QixPQUFPLENBQUMsTUFBTSxDQUFDLEtBQUssQ0FBQyw0QkFBNEIsRUFBRSxVQUFVLENBQUMsQ0FBQTtRQUM5RCxPQUFPO0lBQ1gsQ0FBQztJQUVELE1BQU0sSUFBSSxHQUFHLE1BQU0sTUFBTSxDQUFDLFlBQVksQ0FBQyxVQUFVLENBQUMsRUFBRSxXQUFXLEVBQUUsRUFBRSxJQUFJLEVBQUUsWUFBWSxFQUFFLENBQUMsQ0FBQztJQUN6RixJQUFJLENBQUMsSUFBSSxFQUFFLENBQUM7UUFDUixPQUFPLENBQUMsTUFBTSxDQUFDLEtBQUssQ0FBQyxpQ0FBaUMsQ0FBQyxDQUFBO1FBQ3ZELE9BQU87SUFDWCxDQUFDO0lBRUQsTUFBTSxVQUFVLEdBQVEsTUFBTSxNQUFNLENBQUMsS0FBSyxDQUFDLGNBQWMsQ0FBQyxNQUFNLENBQUM7UUFDN0QsS0FBSyxFQUFFLFdBQVc7UUFDbEIsSUFBSSxFQU
FFLElBQUk7UUFDVixlQUFlLEVBQUcsT0FBZSxDQUFDLGVBQWUsSUFBSSxjQUFjO0tBQ3RFLENBQUMsQ0FBQTtJQUVGLElBQUksQ0FBQyxVQUFVLEVBQUUsQ0FBQztRQUNkLE9BQU8sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLDBCQUEwQixDQUFDLENBQUE7UUFDaEQsT0FBTztJQUNYLENBQUM7SUFFRCxNQUFNLFlBQVksR0FBRyxVQUFVLENBQUMsSUFBSSxJQUFJLEVBQUUsQ0FBQztJQUUzQyxJQUFJLE9BQU8sQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUNkLEtBQUssQ0FBQyxPQUFPLENBQUMsR0FBRyxFQUFFLFlBQVksQ0FBQyxDQUFBO0lBQ3BDLENBQUM7U0FBTSxDQUFDO1FBQ0osT0FBTyxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsWUFBWSxDQUFDLENBQUE7SUFDdEMsQ0FBQztJQUVELGtFQUFrRTtJQUNsRSxPQUFPLFVBQVUsQ0FBQTtBQUNyQixDQUFDLENBQUEifQ==

View File

@ -10,6 +10,7 @@ import { init } from './commands/init.js';
import { build } from './commands/build.js';
import { fetch } from './commands/fetch.js';
import { run } from './commands/run.js';
import { transcribeCommand, TranscribeOptionsSchema } from './commands/transcribe.js';
export const logger = createLogger('llm-tools');
const modify = async (argv) => await run(argv);
const yargOptions = {
@ -29,6 +30,7 @@ const yargOptions = {
yargs(hideBin(process.argv))
.command('init', 'Initialize KBot configuration', (yargs) => toYargs(yargs, OptionsSchema(), yargOptions), init)
.command('modify [prompt]', 'Modify an existing project', (yargs) => toYargs(yargs, OptionsSchema(), yargOptions), modify)
.command('transcribe', 'Transcribe audio files', (yargs) => toYargs(yargs, TranscribeOptionsSchema(), yargOptions), transcribeCommand)
.command('types', 'Generate types', (yargs) => { }, (argv) => types())
.command('schemas', 'Generate schemas', (yargs) => { }, (argv) => schemas())
.command('build', 'Build kbot essentials', (yargs) => { }, (argv) => build())
@ -39,4 +41,4 @@ yargs(hideBin(process.argv))
.help()
//.wrap(yargs.terminalWidth() - 20)
.parse();
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9tYWluLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7QUFDQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxFQUFFLE9BQU8sRUFBRSxNQUFNLGVBQWUsQ0FBQTtBQUN2QyxPQUFPLEVBQUUsT0FBTyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDM0MsT0FBTyxFQUFFLFlBQVksRUFBRSxNQUFNLGVBQWUsQ0FBQTtBQUM1QyxPQUFPLEVBQUUsYUFBYSxFQUFFLE9BQU8sRUFBRSxLQUFLLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUkvRCxPQUFPLFdBQVcsTUFBTSxvQkFBb0IsQ0FBQTtBQUM1QyxPQUFPLEVBQUUsUUFBUSxFQUFFLE1BQU0sd0JBQXdCLENBQUE7QUFDakQsT0FBTyxFQUFFLElBQUksRUFBRSxNQUFNLG9CQUFvQixDQUFBO0FBQ3pDLE9BQU8sRUFBRSxLQUFLLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsS0FBSyxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDM0MsT0FBTyxFQUFFLEdBQUcsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRXZDLE1BQU0sQ0FBQyxNQUFNLE1BQU0sR0FBUSxZQUFZLENBQUMsV0FBVyxDQUFDLENBQUE7QUFFcEQsTUFBTSxNQUFNLEdBQUcsS0FBSyxFQUFFLElBQWUsRUFBRSxFQUFFLENBQUUsTUFBTSxHQUFHLENBQUMsSUFBaUIsQ0FBQyxDQUFBO0FBRXZFLE1BQU0sV0FBVyxHQUFRO0lBQ3ZCLEtBQUssRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLEdBQUcsRUFBRSxPQUFPLEVBQUUsRUFBRTtRQUMvQixRQUFRLEdBQUcsRUFBRSxDQUFDO1lBQ1osS0FBSyxRQUFRO2dCQUNYLENBQUM7b0JBQ0MsT0FBTyxNQUFNLENBQUMsVUFBVSxDQUFDLEdBQUcsRUFBRSxPQUFPLENBQUMsQ0FBQTtnQkFDeEMsQ0FBQztZQUNILEtBQUssU0FBUztnQkFDWixDQUFDO29CQUNDLE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLEVBQUUsRUFBQyxHQUFHLE9BQU8sRUFBRSxLQUFLLEVBQUUsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDLFdBQVcsRUFBRSxFQUFDLENBQUMsQ0FBQTtnQkFDdEUsQ0FBQztRQUNMLENBQUM7SUFDSCxDQUFDLENBQUM7Q0FDSCxDQUFBO0FBRUQsS0FBSyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7S0FDekIsT0FBTyxDQUNOLE1BQU0sRUFDTiwrQkFBK0IsRUFDL0IsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLE9BQU8sQ0FBQyxLQUFLLEVBQUUsYUFBYSxFQUFFLEVBQUUsV0FBVyxDQUFDLEVBQ3ZELElBQUksQ0FDTDtLQUNBLE9BQU8sQ0FDTixpQkFBaUIsRUFDakIsNEJBQTRCLEVBQzVCLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxPQUFPLENBQUMsS0FBSyxFQUFFLGFBQWEsRUFBRSxFQUFFLFdBQVcsQ0FBQyxFQUN2RCxNQUFNLENBQ1A7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLGdCQUFnQixFQUNoQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQU
csQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sU0FBUyxFQUNULGtCQUFrQixFQUNsQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxPQUFPLEVBQUUsQ0FDcEI7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLHVCQUF1QixFQUN2QixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLCtCQUErQixFQUMvQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sU0FBUyxFQUNULHdCQUF3QixFQUN4QixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLFdBQVcsQ0FDWjtLQUNBLE9BQU8sQ0FDTixVQUFVLEVBQ1YsZUFBZSxFQUNmLENBQUMsS0FBSyxFQUFFLEVBQUUsR0FBRyxDQUFDLEVBQ2QsUUFBUSxDQUNUO0tBQ0EsT0FBTyxDQUFDLENBQUMsaUJBQWlCLEVBQUUsSUFBSSxDQUFDLEVBQUUsd0JBQXdCLEVBQzFELENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxPQUFPLENBQUMsS0FBSyxFQUFFLGFBQWEsRUFBRSxFQUFFLFdBQVcsQ0FBQyxFQUFFLE1BQU0sQ0FBQztLQUNqRSxJQUFJLEVBQUU7SUFDUCxtQ0FBbUM7S0FDbEMsS0FBSyxFQUFFLENBQUEifQ==
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9tYWluLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7QUFDQSxPQUFPLEtBQUssTUFBTSxPQUFPLENBQUE7QUFDekIsT0FBTyxFQUFFLE9BQU8sRUFBRSxNQUFNLGVBQWUsQ0FBQTtBQUN2QyxPQUFPLEVBQUUsT0FBTyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDM0MsT0FBTyxFQUFFLFlBQVksRUFBRSxNQUFNLGVBQWUsQ0FBQTtBQUU1QyxPQUFPLEVBQUUsYUFBYSxFQUFFLE9BQU8sRUFBRSxLQUFLLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUcvRCxPQUFPLFdBQVcsTUFBTSxvQkFBb0IsQ0FBQTtBQUM1QyxPQUFPLEVBQUUsUUFBUSxFQUFFLE1BQU0sd0JBQXdCLENBQUE7QUFDakQsT0FBTyxFQUFFLElBQUksRUFBRSxNQUFNLG9CQUFvQixDQUFBO0FBQ3pDLE9BQU8sRUFBRSxLQUFLLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsS0FBSyxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDM0MsT0FBTyxFQUFFLEdBQUcsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRXZDLE9BQU8sRUFBRSxpQkFBaUIsRUFBRSx1QkFBdUIsRUFBRSxNQUFNLDBCQUEwQixDQUFBO0FBRXJGLE1BQU0sQ0FBQyxNQUFNLE1BQU0sR0FBUSxZQUFZLENBQUMsV0FBVyxDQUFDLENBQUE7QUFFcEQsTUFBTSxNQUFNLEdBQUcsS0FBSyxFQUFFLElBQWUsRUFBRSxFQUFFLENBQUUsTUFBTSxHQUFHLENBQUMsSUFBaUIsQ0FBQyxDQUFBO0FBRXZFLE1BQU0sV0FBVyxHQUFRO0lBQ3ZCLEtBQUssRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLEdBQUcsRUFBRSxPQUFPLEVBQUUsRUFBRTtRQUMvQixRQUFRLEdBQUcsRUFBRSxDQUFDO1lBQ1osS0FBSyxRQUFRO2dCQUNYLENBQUM7b0JBQ0MsT0FBTyxNQUFNLENBQUMsVUFBVSxDQUFDLEdBQUcsRUFBRSxPQUFPLENBQUMsQ0FBQTtnQkFDeEMsQ0FBQztZQUNILEtBQUssU0FBUztnQkFDWixDQUFDO29CQUNDLE9BQU8sTUFBTSxDQUFDLE1BQU0sQ0FBQyxHQUFHLEVBQUUsRUFBQyxHQUFHLE9BQU8sRUFBRSxLQUFLLEVBQUUsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDLFdBQVcsRUFBRSxFQUFDLENBQUMsQ0FBQTtnQkFDdEUsQ0FBQztRQUNMLENBQUM7SUFDSCxDQUFDLENBQUM7Q0FDSCxDQUFBO0FBRUQsS0FBSyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLENBQUM7S0FDekIsT0FBTyxDQUNOLE1BQU0sRUFDTiwrQkFBK0IsRUFDL0IsQ0FBQyxLQUFLLEVBQUUsRUFBRSxDQUFDLE9BQU8sQ0FBQyxLQUFLLEVBQUUsYUFBYSxFQUFFLEVBQUUsV0FBVyxDQUFDLEVBQ3ZELElBQUksQ0FDTDtLQUNBLE9BQU8sQ0FDTixpQkFBaUIsRUFDakIsNEJBQTRCLEVBQzVCLENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxPQUFPLENBQUMsS0FBSyxFQUFFLGFBQWEsRUFBRSxFQUFFLFdBQVcsQ0FBQyxFQUN2RCxNQUFNLENBQ1A7S0FDQS
xPQUFPLENBQ04sWUFBWSxFQUNaLHdCQUF3QixFQUN4QixDQUFDLEtBQUssRUFBRSxFQUFFLENBQUMsT0FBTyxDQUFDLEtBQUssRUFBRSx1QkFBdUIsRUFBRSxFQUFFLFdBQVcsQ0FBQyxFQUNqRSxpQkFBaUIsQ0FDbEI7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLGdCQUFnQixFQUNoQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sU0FBUyxFQUNULGtCQUFrQixFQUNsQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxPQUFPLEVBQUUsQ0FDcEI7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLHVCQUF1QixFQUN2QixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sT0FBTyxFQUNQLCtCQUErQixFQUMvQixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQyxLQUFLLEVBQUUsQ0FDbEI7S0FDQSxPQUFPLENBQ04sU0FBUyxFQUNULHdCQUF3QixFQUN4QixDQUFDLEtBQUssRUFBRSxFQUFFLEdBQUcsQ0FBQyxFQUNkLFdBQVcsQ0FDWjtLQUNBLE9BQU8sQ0FDTixVQUFVLEVBQ1YsZUFBZSxFQUNmLENBQUMsS0FBSyxFQUFFLEVBQUUsR0FBRyxDQUFDLEVBQ2QsUUFBUSxDQUNUO0tBQ0EsT0FBTyxDQUFDLENBQUMsaUJBQWlCLEVBQUUsSUFBSSxDQUFDLEVBQUUsd0JBQXdCLEVBQzFELENBQUMsS0FBSyxFQUFFLEVBQUUsQ0FBQyxPQUFPLENBQUMsS0FBSyxFQUFFLGFBQWEsRUFBRSxFQUFFLFdBQVcsQ0FBQyxFQUFFLE1BQU0sQ0FBQztLQUNqRSxJQUFJLEVBQUU7SUFDUCxtQ0FBbUM7S0FDbEMsS0FBSyxFQUFFLENBQUEifQ==

View File

@ -2,11 +2,11 @@ export declare enum E_OPENAI_MODEL {
MODEL_GPT_4_0613 = "gpt-4-0613",
MODEL_GPT_4 = "gpt-4",
MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo",
MODEL_GPT_AUDIO = "gpt-audio",
MODEL_GPT_5_NANO = "gpt-5-nano",
MODEL_GPT_5 = "gpt-5",
MODEL_GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07",
MODEL_GPT_5_MINI = "gpt-5-mini",
MODEL_GPT_5_NANO_2025_08_07 = "gpt-5-nano-2025-08-07",
MODEL_GPT_AUDIO_2025_08_28 = "gpt-audio-2025-08-28",
MODEL_GPT_REALTIME = "gpt-realtime",
MODEL_GPT_REALTIME_2025_08_28 = "gpt-realtime-2025-08-28",
MODEL_DAVINCI_002 = "davinci-002",
MODEL_BABBAGE_002 = "babbage-002",
MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct",
@ -77,6 +77,10 @@ export declare enum E_OPENAI_MODEL {
MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26 = "o4-mini-deep-research-2025-06-26",
MODEL_GPT_5_CHAT_LATEST = "gpt-5-chat-latest",
MODEL_GPT_5_2025_08_07 = "gpt-5-2025-08-07",
MODEL_GPT_5 = "gpt-5",
MODEL_GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07",
MODEL_GPT_5_MINI = "gpt-5-mini",
MODEL_GPT_5_NANO_2025_08_07 = "gpt-5-nano-2025-08-07",
MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k",
MODEL_TTS_1 = "tts-1",
MODEL_WHISPER_1 = "whisper-1",

View File

@ -3,11 +3,11 @@ export var E_OPENAI_MODEL;
E_OPENAI_MODEL["MODEL_GPT_4_0613"] = "gpt-4-0613";
E_OPENAI_MODEL["MODEL_GPT_4"] = "gpt-4";
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO"] = "gpt-3.5-turbo";
E_OPENAI_MODEL["MODEL_GPT_AUDIO"] = "gpt-audio";
E_OPENAI_MODEL["MODEL_GPT_5_NANO"] = "gpt-5-nano";
E_OPENAI_MODEL["MODEL_GPT_5"] = "gpt-5";
E_OPENAI_MODEL["MODEL_GPT_5_MINI_2025_08_07"] = "gpt-5-mini-2025-08-07";
E_OPENAI_MODEL["MODEL_GPT_5_MINI"] = "gpt-5-mini";
E_OPENAI_MODEL["MODEL_GPT_5_NANO_2025_08_07"] = "gpt-5-nano-2025-08-07";
E_OPENAI_MODEL["MODEL_GPT_AUDIO_2025_08_28"] = "gpt-audio-2025-08-28";
E_OPENAI_MODEL["MODEL_GPT_REALTIME"] = "gpt-realtime";
E_OPENAI_MODEL["MODEL_GPT_REALTIME_2025_08_28"] = "gpt-realtime-2025-08-28";
E_OPENAI_MODEL["MODEL_DAVINCI_002"] = "davinci-002";
E_OPENAI_MODEL["MODEL_BABBAGE_002"] = "babbage-002";
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT"] = "gpt-3.5-turbo-instruct";
@ -78,9 +78,13 @@ export var E_OPENAI_MODEL;
E_OPENAI_MODEL["MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26"] = "o4-mini-deep-research-2025-06-26";
E_OPENAI_MODEL["MODEL_GPT_5_CHAT_LATEST"] = "gpt-5-chat-latest";
E_OPENAI_MODEL["MODEL_GPT_5_2025_08_07"] = "gpt-5-2025-08-07";
E_OPENAI_MODEL["MODEL_GPT_5"] = "gpt-5";
E_OPENAI_MODEL["MODEL_GPT_5_MINI_2025_08_07"] = "gpt-5-mini-2025-08-07";
E_OPENAI_MODEL["MODEL_GPT_5_MINI"] = "gpt-5-mini";
E_OPENAI_MODEL["MODEL_GPT_5_NANO_2025_08_07"] = "gpt-5-nano-2025-08-07";
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_16K"] = "gpt-3.5-turbo-16k";
E_OPENAI_MODEL["MODEL_TTS_1"] = "tts-1";
E_OPENAI_MODEL["MODEL_WHISPER_1"] = "whisper-1";
E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_ADA_002"] = "text-embedding-ada-002";
})(E_OPENAI_MODEL || (E_OPENAI_MODEL = {}));
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQW1GWDtBQW5GRCxXQUFZLGNBQWM7SUFDeEIsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdURBQXFDLENBQUE7SUFDckMsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdUVBQXFELENBQUE7SUFDckQsaURBQStCLENBQUE7SUFDL0IsdUVBQXFELENBQUE7SUFDckQsbURBQWlDLENBQUE7SUFDakMsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsbUZBQWlFLENBQUE7SUFDakUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsNkNBQTJCLENBQUE7SUFDM0IsaURBQStCLENBQUE7SUFDL0IsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsbUVBQWlELENBQUE7SUFDakQsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MsaUVBQStDLENBQUE7SUFDL0MsMkNBQXlCLENBQUE7SUFDekIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUscUVBQW1ELENBQUE7SUFDbkQsMkVBQXlELENBQUE7SUFDekQseUVBQXVELENBQUE7SUFDdkQsaUZBQStELENBQUE7SUFDL0QsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsMkdBQXlGLENBQUE7SUFDekYscUdBQW1GLENBQUE7SUFDbkYsdURBQXFDLENBQUE7SUFDckMsaUNBQWUsQ0FBQTtJQUNmLHFGQUFtRSxDQUFBO0lBQ25FLCtFQUE2RCxDQUFBO0lBQzdELDJDQUF5QixDQUFBO0lBQ3pCLGlFQUErQyxDQUFBO0lBQy9DLCtEQUE2QyxDQUFBO0lBQzdDLDZGQUEyRSxDQUFBO0lBQzNFLHVFQUFxRCxDQUFBO0lBQ3JELHVHQUFxRixDQUFBO0lBQ3JGLGlGQUErRCxDQUFBO0lBQy9ELCtEQUE2QyxDQUFBO0lBQzdDLHlFQUF1RCxDQUFBO0lBQ3ZELCtEQUE2QyxDQUFBO0lBQzdDLHlDQUF1QixDQUFBO0lBQ3ZCLDJEQUF5QyxDQUFBO0lBQ3pDLHVEQUFxQyxDQUFBO0lBQ3JDLGlFQUErQyxDQUFBO0lBQy9DLGlDQUFlLENBQUE7SUFDZiwyQ0FBeUIsQ0FBQTtJQUN6QixpRUFBK0MsQ0FBQTtJQUMvQywyQ0FBeUIsQ0FBQTtJQUN6QiwyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQywyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQyxtREFBaUMsQ0FBQTtJQUNqQywrREFBNk
MsQ0FBQTtJQUM3QyxpR0FBK0UsQ0FBQTtJQUMvRSwyRkFBeUUsQ0FBQTtJQUN6RSx1RUFBcUQsQ0FBQTtJQUNyRCw2RkFBMkUsQ0FBQTtJQUMzRSwrREFBNkMsQ0FBQTtJQUM3Qyw2REFBMkMsQ0FBQTtJQUMzQywrREFBNkMsQ0FBQTtJQUM3Qyx1Q0FBcUIsQ0FBQTtJQUNyQiwrQ0FBNkIsQ0FBQTtJQUM3Qix5RUFBdUQsQ0FBQTtBQUN6RCxDQUFDLEVBbkZXLGNBQWMsS0FBZCxjQUFjLFFBbUZ6QiJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQXVGWDtBQXZGRCxXQUFZLGNBQWM7SUFDeEIsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdURBQXFDLENBQUE7SUFDckMsK0NBQTZCLENBQUE7SUFDN0IsaURBQStCLENBQUE7SUFDL0IscUVBQW1ELENBQUE7SUFDbkQscURBQW1DLENBQUE7SUFDbkMsMkVBQXlELENBQUE7SUFDekQsbURBQWlDLENBQUE7SUFDakMsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsbUZBQWlFLENBQUE7SUFDakUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsNkNBQTJCLENBQUE7SUFDM0IsaURBQStCLENBQUE7SUFDL0IsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsbUVBQWlELENBQUE7SUFDakQsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MsaUVBQStDLENBQUE7SUFDL0MsMkNBQXlCLENBQUE7SUFDekIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUscUVBQW1ELENBQUE7SUFDbkQsMkVBQXlELENBQUE7SUFDekQseUVBQXVELENBQUE7SUFDdkQsaUZBQStELENBQUE7SUFDL0QsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsMkdBQXlGLENBQUE7SUFDekYscUdBQW1GLENBQUE7SUFDbkYsdURBQXFDLENBQUE7SUFDckMsaUNBQWUsQ0FBQTtJQUNmLHFGQUFtRSxDQUFBO0lBQ25FLCtFQUE2RCxDQUFBO0lBQzdELDJDQUF5QixDQUFBO0lBQ3pCLGlFQUErQyxDQUFBO0lBQy9DLCtEQUE2QyxDQUFBO0lBQzdDLDZGQUEyRSxDQUFBO0lBQzNFLHVFQUFxRCxDQUFBO0lBQ3JELHVHQUFxRixDQUFBO0lBQ3JGLGlGQUErRCxDQUFBO0lBQy9ELCtEQUE2QyxDQUFBO0lBQzdDLHlFQUF1RCxDQUFBO0lBQ3ZELCtEQUE2QyxDQUFBO0lBQzdDLHlDQUF1QixDQUFBO0lBQ3ZCLDJEQUF5QyxDQUFBO0lBQ3pDLHVEQUFxQyxDQUFBO0lBQ3JDLGlFQUErQyxDQUFBO0lBQy9DLGlDQUFlLENBQUE7SUFDZiwyQ0FBeUIsQ0FBQTtJQUN6QixpRUFBK0MsQ0FBQTtJQUMvQywyQ0FBeUIsQ0FBQTtJQUN6QiwyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQywyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQyxtREFBaUMsQ0FBQTtJQUNqQywrREFBNk
MsQ0FBQTtJQUM3QyxpR0FBK0UsQ0FBQTtJQUMvRSwyRkFBeUUsQ0FBQTtJQUN6RSx1RUFBcUQsQ0FBQTtJQUNyRCw2RkFBMkUsQ0FBQTtJQUMzRSwrREFBNkMsQ0FBQTtJQUM3Qyw2REFBMkMsQ0FBQTtJQUMzQyx1Q0FBcUIsQ0FBQTtJQUNyQix1RUFBcUQsQ0FBQTtJQUNyRCxpREFBK0IsQ0FBQTtJQUMvQix1RUFBcUQsQ0FBQTtJQUNyRCwrREFBNkMsQ0FBQTtJQUM3Qyx1Q0FBcUIsQ0FBQTtJQUNyQiwrQ0FBNkIsQ0FBQTtJQUM3Qix5RUFBdUQsQ0FBQTtBQUN6RCxDQUFDLEVBdkZXLGNBQWMsS0FBZCxjQUFjLFFBdUZ6QiJ9

View File

@ -1,6 +1,9 @@
export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
MODEL_FREE_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free",
MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free",
MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
MODEL_FREE_GOOGLE_GEMMA_3N_E2B_IT_FREE = "google/gemma-3n-e2b-it:free",
@ -10,9 +13,9 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_MOONSHOTAI_KIMI_DEV_72B_FREE = "moonshotai/kimi-dev-72b:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B_FREE = "deepseek/deepseek-r1-0528-qwen3-8b:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_FREE = "deepseek/deepseek-r1-0528:free",
MODEL_FREE_SARVAMAI_SARVAM_M_FREE = "sarvamai/sarvam-m:free",
MODEL_FREE_MISTRALAI_DEVSTRAL_SMALL_2505_FREE = "mistralai/devstral-small-2505:free",
MODEL_FREE_GOOGLE_GEMMA_3N_E4B_IT_FREE = "google/gemma-3n-e4b-it:free",
MODEL_FREE_META_LLAMA_LLAMA_3_3_8B_INSTRUCT_FREE = "meta-llama/llama-3.3-8b-instruct:free",
MODEL_FREE_QWEN_QWEN3_4B_FREE = "qwen/qwen3-4b:free",
MODEL_FREE_QWEN_QWEN3_30B_A3B_FREE = "qwen/qwen3-30b-a3b:free",
MODEL_FREE_QWEN_QWEN3_8B_FREE = "qwen/qwen3-8b:free",
@ -20,16 +23,16 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_QWEN_QWEN3_235B_A22B_FREE = "qwen/qwen3-235b-a22b:free",
MODEL_FREE_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE = "tngtech/deepseek-r1t-chimera:free",
MODEL_FREE_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free",
MODEL_FREE_THUDM_GLM_Z1_32B_FREE = "thudm/glm-z1-32b:free",
MODEL_FREE_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE = "shisa-ai/shisa-v2-llama3.3-70b:free",
MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE = "arliai/qwq-32b-arliai-rpr-v1:free",
MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE = "agentica-org/deepcoder-14b-preview:free",
MODEL_FREE_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE = "moonshotai/kimi-vl-a3b-thinking:free",
MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free",
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
@ -48,7 +51,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free",
MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free",
MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_FREE_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",

View File

@ -1,7 +1,10 @@
export var E_OPENROUTER_MODEL_FREE;
(function (E_OPENROUTER_MODEL_FREE) {
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE"] = "deepseek/deepseek-chat-v3.1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_120B_FREE"] = "openai/gpt-oss-120b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_20B_FREE"] = "openai/gpt-oss-20b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE"] = "z-ai/glm-4.5-air:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_CODER_FREE"] = "qwen/qwen3-coder:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE"] = "moonshotai/kimi-k2:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE"] = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3N_E2B_IT_FREE"] = "google/gemma-3n-e2b-it:free";
@ -11,9 +14,9 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_KIMI_DEV_72B_FREE"] = "moonshotai/kimi-dev-72b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B_FREE"] = "deepseek/deepseek-r1-0528-qwen3-8b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_FREE"] = "deepseek/deepseek-r1-0528:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_SARVAMAI_SARVAM_M_FREE"] = "sarvamai/sarvam-m:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_DEVSTRAL_SMALL_2505_FREE"] = "mistralai/devstral-small-2505:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3N_E4B_IT_FREE"] = "google/gemma-3n-e4b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_3_8B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-8b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_4B_FREE"] = "qwen/qwen3-4b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_30B_A3B_FREE"] = "qwen/qwen3-30b-a3b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_8B_FREE"] = "qwen/qwen3-8b:free";
@ -21,16 +24,16 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_235B_A22B_FREE"] = "qwen/qwen3-235b-a22b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE"] = "tngtech/deepseek-r1t-chimera:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_MAI_DS_R1_FREE"] = "microsoft/mai-ds-r1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_THUDM_GLM_Z1_32B_FREE"] = "thudm/glm-z1-32b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE"] = "shisa-ai/shisa-v2-llama3.3-70b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE"] = "arliai/qwq-32b-arliai-rpr-v1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE"] = "agentica-org/deepcoder-14b-preview:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE"] = "moonshotai/kimi-vl-a3b-thinking:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25"] = "google/gemini-2.5-pro-exp-03-25";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_FEATHERLESS_QWERKY_72B_FREE"] = "featherless/qwerky-72b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free";
@ -49,11 +52,10 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE"] = "meta-llama/llama-3.3-70b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE"] = "meta-llama/llama-3.2-11b-vision-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-405b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free";
})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {}));
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkF3RFg7QUF4REQsV0FBWSx1QkFBdUI7SUFDakMseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsdUZBQTRELENBQUE7SUFDNUQsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUscUZBQTBELENBQUE7SUFDMUQsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYsaUdBQXNFLENBQUE7SUFDdEUscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYscUlBQTBHLENBQUE7SUFDMUcseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQXhEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBd0RsQyJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkEwRFg7QUExREQsV0FBWSx1QkFBdUI7SUFDakMsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQTFEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBMERsQyJ9

View File

@ -1,19 +1,39 @@
export declare enum E_OPENROUTER_MODEL {
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b",
MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507 = "qwen/qwen3-30b-a3b-thinking-2507",
MODEL_X_AI_GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1",
MODEL_NOUSRESEARCH_HERMES_4_70B = "nousresearch/hermes-4-70b",
MODEL_NOUSRESEARCH_HERMES_4_405B = "nousresearch/hermes-4-405b",
MODEL_GOOGLE_GEMINI_2_5_FLASH_IMAGE_PREVIEW = "google/gemini-2.5-flash-image-preview",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_1 = "deepseek/deepseek-chat-v3.1",
MODEL_DEEPSEEK_DEEPSEEK_V3_1_BASE = "deepseek/deepseek-v3.1-base",
MODEL_OPENAI_GPT_4O_AUDIO_PREVIEW = "openai/gpt-4o-audio-preview",
MODEL_MISTRALAI_MISTRAL_MEDIUM_3_1 = "mistralai/mistral-medium-3.1",
MODEL_BAIDU_ERNIE_4_5_21B_A3B = "baidu/ernie-4.5-21b-a3b",
MODEL_BAIDU_ERNIE_4_5_VL_28B_A3B = "baidu/ernie-4.5-vl-28b-a3b",
MODEL_Z_AI_GLM_4_5V = "z-ai/glm-4.5v",
MODEL_AI21_JAMBA_MINI_1_7 = "ai21/jamba-mini-1.7",
MODEL_AI21_JAMBA_LARGE_1_7 = "ai21/jamba-large-1.7",
MODEL_OPENAI_GPT_5_CHAT = "openai/gpt-5-chat",
MODEL_OPENAI_GPT_5 = "openai/gpt-5",
MODEL_OPENAI_GPT_5_MINI = "openai/gpt-5-mini",
MODEL_OPENAI_GPT_5_NANO = "openai/gpt-5-nano",
MODEL_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_OPENAI_GPT_OSS_120B = "openai/gpt-oss-120b",
MODEL_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
MODEL_OPENAI_GPT_OSS_20B = "openai/gpt-oss-20b",
MODEL_ANTHROPIC_CLAUDE_OPUS_4_1 = "anthropic/claude-opus-4.1",
MODEL_MISTRALAI_CODESTRAL_2508 = "mistralai/codestral-2508",
MODEL_QWEN_QWEN3_CODER_30B_A3B_INSTRUCT = "qwen/qwen3-coder-30b-a3b-instruct",
MODEL_QWEN_QWEN3_30B_A3B_INSTRUCT_2507 = "qwen/qwen3-30b-a3b-instruct-2507",
MODEL_Z_AI_GLM_4_5 = "z-ai/glm-4.5",
MODEL_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
MODEL_Z_AI_GLM_4_5_AIR = "z-ai/glm-4.5-air",
MODEL_QWEN_QWEN3_235B_A22B_THINKING_2507 = "qwen/qwen3-235b-a22b-thinking-2507",
MODEL_Z_AI_GLM_4_32B = "z-ai/glm-4-32b",
MODEL_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free",
MODEL_QWEN_QWEN3_CODER = "qwen/qwen3-coder",
MODEL_BYTEDANCE_UI_TARS_1_5_7B = "bytedance/ui-tars-1.5-7b",
MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE = "google/gemini-2.5-flash-lite",
@ -32,6 +52,7 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_TNGTECH_DEEPSEEK_R1T2_CHIMERA_FREE = "tngtech/deepseek-r1t2-chimera:free",
MODEL_MORPH_MORPH_V3_LARGE = "morph/morph-v3-large",
MODEL_MORPH_MORPH_V3_FAST = "morph/morph-v3-fast",
MODEL_BAIDU_ERNIE_4_5_VL_424B_A47B = "baidu/ernie-4.5-vl-424b-a47b",
MODEL_BAIDU_ERNIE_4_5_300B_A47B = "baidu/ernie-4.5-300b-a47b",
MODEL_THEDRUMMER_ANUBIS_70B_V1_1 = "thedrummer/anubis-70b-v1.1",
MODEL_INCEPTION_MERCURY = "inception/mercury",
@ -42,6 +63,7 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_GOOGLE_GEMINI_2_5_FLASH = "google/gemini-2.5-flash",
MODEL_GOOGLE_GEMINI_2_5_PRO = "google/gemini-2.5-pro",
MODEL_MOONSHOTAI_KIMI_DEV_72B_FREE = "moonshotai/kimi-dev-72b:free",
MODEL_MOONSHOTAI_KIMI_DEV_72B = "moonshotai/kimi-dev-72b",
MODEL_OPENAI_O3_PRO = "openai/o3-pro",
MODEL_X_AI_GROK_3_MINI = "x-ai/grok-3-mini",
MODEL_X_AI_GROK_3 = "x-ai/grok-3",
@ -49,13 +71,10 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MISTRALAI_MAGISTRAL_MEDIUM_2506 = "mistralai/magistral-medium-2506",
MODEL_MISTRALAI_MAGISTRAL_MEDIUM_2506_THINKING = "mistralai/magistral-medium-2506:thinking",
MODEL_GOOGLE_GEMINI_2_5_PRO_PREVIEW = "google/gemini-2.5-pro-preview",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_7B = "deepseek/deepseek-r1-distill-qwen-7b",
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B_FREE = "deepseek/deepseek-r1-0528-qwen3-8b:free",
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B = "deepseek/deepseek-r1-0528-qwen3-8b",
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_FREE = "deepseek/deepseek-r1-0528:free",
MODEL_DEEPSEEK_DEEPSEEK_R1_0528 = "deepseek/deepseek-r1-0528",
MODEL_SARVAMAI_SARVAM_M_FREE = "sarvamai/sarvam-m:free",
MODEL_THEDRUMMER_VALKYRIE_49B_V1 = "thedrummer/valkyrie-49b-v1",
MODEL_ANTHROPIC_CLAUDE_OPUS_4 = "anthropic/claude-opus-4",
MODEL_ANTHROPIC_CLAUDE_SONNET_4 = "anthropic/claude-sonnet-4",
MODEL_MISTRALAI_DEVSTRAL_SMALL_2505_FREE = "mistralai/devstral-small-2505:free",
@ -63,6 +82,7 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_GOOGLE_GEMMA_3N_E4B_IT_FREE = "google/gemma-3n-e4b-it:free",
MODEL_GOOGLE_GEMMA_3N_E4B_IT = "google/gemma-3n-e4b-it",
MODEL_OPENAI_CODEX_MINI = "openai/codex-mini",
MODEL_META_LLAMA_LLAMA_3_3_8B_INSTRUCT_FREE = "meta-llama/llama-3.3-8b-instruct:free",
MODEL_NOUSRESEARCH_DEEPHERMES_3_MISTRAL_24B_PREVIEW = "nousresearch/deephermes-3-mistral-24b-preview",
MODEL_MISTRALAI_MISTRAL_MEDIUM_3 = "mistralai/mistral-medium-3",
MODEL_GOOGLE_GEMINI_2_5_PRO_PREVIEW_05_06 = "google/gemini-2.5-pro-preview-05-06",
@ -73,7 +93,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MICROSOFT_PHI_4_REASONING_PLUS = "microsoft/phi-4-reasoning-plus",
MODEL_INCEPTION_MERCURY_CODER = "inception/mercury-coder",
MODEL_QWEN_QWEN3_4B_FREE = "qwen/qwen3-4b:free",
MODEL_OPENGVLAB_INTERNVL3_14B = "opengvlab/internvl3-14b",
MODEL_DEEPSEEK_DEEPSEEK_PROVER_V2 = "deepseek/deepseek-prover-v2",
MODEL_META_LLAMA_LLAMA_GUARD_4_12B = "meta-llama/llama-guard-4-12b",
MODEL_QWEN_QWEN3_30B_A3B_FREE = "qwen/qwen3-30b-a3b:free",
@ -89,7 +108,7 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_TNGTECH_DEEPSEEK_R1T_CHIMERA = "tngtech/deepseek-r1t-chimera",
MODEL_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free",
MODEL_MICROSOFT_MAI_DS_R1 = "microsoft/mai-ds-r1",
MODEL_THUDM_GLM_Z1_32B_FREE = "thudm/glm-z1-32b:free",
MODEL_THUDM_GLM_Z1_32B = "thudm/glm-z1-32b",
MODEL_THUDM_GLM_4_32B = "thudm/glm-4-32b",
MODEL_OPENAI_O4_MINI_HIGH = "openai/o4-mini-high",
MODEL_OPENAI_O3 = "openai/o3",
@ -112,16 +131,15 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1 = "nvidia/llama-3.3-nemotron-super-49b-v1",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1",
MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick",
MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_META_LLAMA_LLAMA_4_SCOUT = "meta-llama/llama-4-scout",
MODEL_DEEPSEEK_DEEPSEEK_V3_BASE = "deepseek/deepseek-v3-base",
MODEL_SCB10X_LLAMA3_1_TYPHOON2_70B_INSTRUCT = "scb10x/llama3.1-typhoon2-70b-instruct",
MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT = "qwen/qwen2.5-vl-32b-instruct",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324 = "deepseek/deepseek-chat-v3-0324",
MODEL_FEATHERLESS_QWERKY_72B_FREE = "featherless/qwerky-72b:free",
MODEL_OPENAI_O1_PRO = "openai/o1-pro",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct",
@ -147,7 +165,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001",
MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet",
MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking",
MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta",
MODEL_PERPLEXITY_R1_1776 = "perplexity/r1-1776",
MODEL_MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
@ -169,7 +186,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_QWEN_QWEN_PLUS = "qwen/qwen-plus",
MODEL_QWEN_QWEN_MAX = "qwen/qwen-max",
MODEL_OPENAI_O3_MINI = "openai/o3-mini",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_1_5B = "deepseek/deepseek-r1-distill-qwen-1.5b",
MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free",
MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501 = "mistralai/mistral-small-24b-instruct-2501",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B = "deepseek/deepseek-r1-distill-qwen-32b",
@ -210,35 +226,31 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b",
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_LIQUID_LFM_40B = "liquid/lfm-40b",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
@ -254,20 +266,18 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it",
MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA = "anthropic/claude-3.5-sonnet-20240620:beta",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620",
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
@ -280,38 +290,34 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b",
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_COHERE_COMMAND = "cohere/command",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta",
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2",
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
MODEL_OPENROUTER_AUTO = "openrouter/auto",
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b",
MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k",
MODEL_MANCER_WEAVER = "mancer/weaver",
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -33,6 +33,8 @@ export interface IKBotOptions {

agentica-org/deepcoder-14b-preview | paid
agentica-org/deepcoder-14b-preview:free | free
ai21/jamba-large-1.7 | paid
ai21/jamba-mini-1.7 | paid
aion-labs/aion-1.0 | paid
aion-labs/aion-1.0-mini | paid
aion-labs/aion-rp-llama-3.1-8b | paid
@ -41,18 +43,12 @@ export interface IKBotOptions {
amazon/nova-micro-v1 | paid
amazon/nova-pro-v1 | paid
anthropic/claude-3-haiku | paid
anthropic/claude-3-haiku:beta | paid
anthropic/claude-3-opus | paid
anthropic/claude-3-opus:beta | paid
anthropic/claude-3.5-haiku | paid
anthropic/claude-3.5-haiku-20241022 | paid
anthropic/claude-3.5-haiku:beta | paid
anthropic/claude-3.5-sonnet | paid
anthropic/claude-3.5-sonnet-20240620 | paid
anthropic/claude-3.5-sonnet-20240620:beta | paid
anthropic/claude-3.5-sonnet:beta | paid
anthropic/claude-3.7-sonnet | paid
anthropic/claude-3.7-sonnet:beta | paid
anthropic/claude-3.7-sonnet:thinking | paid
anthropic/claude-opus-4 | paid
anthropic/claude-opus-4.1 | paid
@ -64,8 +60,12 @@ export interface IKBotOptions {
arliai/qwq-32b-arliai-rpr-v1 | paid
arliai/qwq-32b-arliai-rpr-v1:free | free
openrouter/auto | paid
baidu/ernie-4.5-21b-a3b | paid
baidu/ernie-4.5-300b-a47b | paid
baidu/ernie-4.5-vl-28b-a3b | paid
baidu/ernie-4.5-vl-424b-a47b | paid
bytedance/ui-tars-1.5-7b | paid
deepcogito/cogito-v2-preview-llama-109b-moe | paid
cohere/command | paid
cohere/command-a | paid
cohere/command-r | paid
@ -75,13 +75,16 @@ export interface IKBotOptions {
cohere/command-r-plus-04-2024 | paid
cohere/command-r-plus-08-2024 | paid
cohere/command-r7b-12-2024 | paid
deepcogito/cogito-v2-preview-deepseek-671b | paid
deepseek/deepseek-prover-v2 | paid
deepseek/deepseek-r1-0528-qwen3-8b | paid
deepseek/deepseek-r1-0528-qwen3-8b:free | free
deepseek/deepseek-chat | paid
deepseek/deepseek-chat-v3-0324 | paid
deepseek/deepseek-chat-v3-0324:free | free
deepseek/deepseek-v3-base | paid
deepseek/deepseek-chat-v3.1 | paid
deepseek/deepseek-chat-v3.1:free | free
deepseek/deepseek-v3.1-base | paid
deepseek/deepseek-r1 | paid
deepseek/deepseek-r1:free | free
deepseek/deepseek-r1-0528 | paid
@ -89,11 +92,9 @@ export interface IKBotOptions {
deepseek/deepseek-r1-distill-llama-70b | paid
deepseek/deepseek-r1-distill-llama-70b:free | free
deepseek/deepseek-r1-distill-llama-8b | paid
deepseek/deepseek-r1-distill-qwen-1.5b | paid
deepseek/deepseek-r1-distill-qwen-14b | paid
deepseek/deepseek-r1-distill-qwen-14b:free | free
deepseek/deepseek-r1-distill-qwen-32b | paid
deepseek/deepseek-r1-distill-qwen-7b | paid
cognitivecomputations/dolphin-mixtral-8x22b | paid
cognitivecomputations/dolphin3.0-mistral-24b | paid
cognitivecomputations/dolphin3.0-mistral-24b:free | free
@ -108,6 +109,7 @@ export interface IKBotOptions {
google/gemini-2.0-flash-exp:free | free
google/gemini-2.0-flash-lite-001 | paid
google/gemini-2.5-flash | paid
google/gemini-2.5-flash-image-preview | paid
google/gemini-2.5-flash-lite | paid
google/gemini-2.5-flash-lite-preview-06-17 | paid
google/gemini-2.5-pro | paid
@ -131,9 +133,7 @@ export interface IKBotOptions {
infermatic/mn-inferor-12b | paid
inflection/inflection-3-pi | paid
inflection/inflection-3-productivity | paid
moonshotai/kimi-dev-72b:free | free
liquid/lfm-3b | paid
liquid/lfm-40b | paid
liquid/lfm-7b | paid
meta-llama/llama-guard-3-8b | paid
anthracite-org/magnum-v2-72b | paid
@ -147,15 +147,17 @@ export interface IKBotOptions {
meta-llama/llama-3.1-70b-instruct | paid
meta-llama/llama-3.1-8b-instruct | paid
meta-llama/llama-3.2-11b-vision-instruct | paid
meta-llama/llama-3.2-11b-vision-instruct:free | free
meta-llama/llama-3.2-1b-instruct | paid
meta-llama/llama-3.2-3b-instruct | paid
meta-llama/llama-3.2-3b-instruct:free | free
meta-llama/llama-3.2-90b-vision-instruct | paid
meta-llama/llama-3.3-70b-instruct | paid
meta-llama/llama-3.3-70b-instruct:free | free
meta-llama/llama-3.3-8b-instruct:free | free
meta-llama/llama-4-maverick | paid
meta-llama/llama-4-maverick:free | free
meta-llama/llama-4-scout | paid
meta-llama/llama-4-scout:free | free
meta-llama/llama-guard-4-12b | paid
meta-llama/llama-guard-2-8b | paid
microsoft/mai-ds-r1 | paid
@ -188,9 +190,9 @@ export interface IKBotOptions {
mistralai/mistral-7b-instruct | paid
mistralai/mistral-7b-instruct:free | free
mistralai/mistral-7b-instruct-v0.1 | paid
mistralai/mistral-7b-instruct-v0.2 | paid
mistralai/mistral-7b-instruct-v0.3 | paid
mistralai/mistral-medium-3 | paid
mistralai/mistral-medium-3.1 | paid
mistralai/mistral-nemo | paid
mistralai/mistral-nemo:free | free
mistralai/mistral-small-24b-instruct-2501 | paid
@ -204,10 +206,12 @@ export interface IKBotOptions {
mistralai/pixtral-12b | paid
mistralai/pixtral-large-2411 | paid
mistralai/mistral-saba | paid
moonshotai/kimi-vl-a3b-thinking | paid
moonshotai/kimi-vl-a3b-thinking:free | free
moonshotai/kimi-dev-72b | paid
moonshotai/kimi-dev-72b:free | free
moonshotai/kimi-k2 | paid
moonshotai/kimi-k2:free | free
moonshotai/kimi-vl-a3b-thinking | paid
moonshotai/kimi-vl-a3b-thinking:free | free
morph/morph-v3-fast | paid
morph/morph-v3-large | paid
gryphe/mythomax-l2-13b | paid
@ -216,9 +220,10 @@ export interface IKBotOptions {
neversleep/noromaid-20b | paid
nousresearch/deephermes-3-llama-3-8b-preview:free | free
nousresearch/deephermes-3-mistral-24b-preview | paid
nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid
nousresearch/hermes-3-llama-3.1-405b | paid
nousresearch/hermes-3-llama-3.1-70b | paid
nousresearch/hermes-4-405b | paid
nousresearch/hermes-4-70b | paid
nousresearch/hermes-2-pro-llama-3-8b | paid
nvidia/llama-3.1-nemotron-70b-instruct | paid
nvidia/llama-3.1-nemotron-ultra-253b-v1 | paid
@ -243,6 +248,7 @@ export interface IKBotOptions {
openai/gpt-4o-2024-08-06 | paid
openai/gpt-4o-2024-11-20 | paid
openai/gpt-4o:extended | paid
openai/gpt-4o-audio-preview | paid
openai/gpt-4o-search-preview | paid
openai/gpt-4o-mini | paid
openai/gpt-4o-mini-2024-07-18 | paid
@ -252,6 +258,7 @@ export interface IKBotOptions {
openai/gpt-5-mini | paid
openai/gpt-5-nano | paid
openai/gpt-oss-120b | paid
openai/gpt-oss-120b:free | free
openai/gpt-oss-20b | paid
openai/gpt-oss-20b:free | free
openai/o1 | paid
@ -264,7 +271,6 @@ export interface IKBotOptions {
openai/o3-pro | paid
openai/o4-mini | paid
openai/o4-mini-high | paid
opengvlab/internvl3-14b | paid
perplexity/r1-1776 | paid
perplexity/sonar | paid
perplexity/sonar-deep-research | paid
@ -272,8 +278,6 @@ export interface IKBotOptions {
perplexity/sonar-reasoning | paid
perplexity/sonar-reasoning-pro | paid
pygmalionai/mythalion-13b | paid
featherless/qwerky-72b:free | free
qwen/qwen-2-72b-instruct | paid
qwen/qwen-vl-max | paid
qwen/qwen-vl-plus | paid
qwen/qwen-max | paid
@ -293,11 +297,14 @@ export interface IKBotOptions {
qwen/qwen3-30b-a3b | paid
qwen/qwen3-30b-a3b:free | free
qwen/qwen3-30b-a3b-instruct-2507 | paid
qwen/qwen3-30b-a3b-thinking-2507 | paid
qwen/qwen3-32b | paid
qwen/qwen3-4b:free | free
qwen/qwen3-8b | paid
qwen/qwen3-8b:free | free
qwen/qwen3-coder-30b-a3b-instruct | paid
qwen/qwen3-coder | paid
qwen/qwen3-coder:free | free
qwen/qwq-32b | paid
qwen/qwq-32b:free | free
qwen/qwq-32b-preview | paid
@ -312,7 +319,6 @@ export interface IKBotOptions {
sao10k/l3-euryale-70b | paid
sao10k/l3.1-euryale-70b | paid
sao10k/l3.3-euryale-70b | paid
sarvamai/sarvam-m:free | free
shisa-ai/shisa-v2-llama3.3-70b | paid
shisa-ai/shisa-v2-llama3.3-70b:free | free
raifle/sorcererlm-8x22b | paid
@ -324,14 +330,12 @@ export interface IKBotOptions {
thedrummer/rocinante-12b | paid
thedrummer/skyfall-36b-v2 | paid
thedrummer/unslopnemo-12b | paid
thedrummer/valkyrie-49b-v1 | paid
thudm/glm-4-32b | paid
thudm/glm-4.1v-9b-thinking | paid
thudm/glm-z1-32b:free | free
thudm/glm-z1-32b | paid
tngtech/deepseek-r1t-chimera | paid
tngtech/deepseek-r1t-chimera:free | free
tngtech/deepseek-r1t2-chimera:free | free
scb10x/llama3.1-typhoon2-70b-instruct | paid
cognitivecomputations/dolphin-mistral-24b-venice-edition:free | free
microsoft/wizardlm-2-8x22b | paid
x-ai/grok-2-1212 | paid
@ -341,11 +345,13 @@ export interface IKBotOptions {
x-ai/grok-3-mini | paid
x-ai/grok-3-mini-beta | paid
x-ai/grok-4 | paid
x-ai/grok-code-fast-1 | paid
x-ai/grok-vision-beta | paid
z-ai/glm-4-32b | paid
z-ai/glm-4.5 | paid
z-ai/glm-4.5-air | paid
z-ai/glm-4.5-air:free | free
z-ai/glm-4.5v | paid

 OpenAI models:

@ -406,7 +412,11 @@ export interface IKBotOptions {
gpt-5-mini-2025-08-07
gpt-5-nano
gpt-5-nano-2025-08-07
gpt-audio
gpt-audio-2025-08-28
gpt-image-1
gpt-realtime
gpt-realtime-2025-08-28
o1
o1-2024-12-17
o1-mini

230470
packages/kbot/dist/main_node.cjs vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
"version": "1.1.49",
"version": "1.1.50",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
"version": "1.1.49",
"version": "1.1.50",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"

View File

@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
"version": "1.1.49",
"version": "1.1.50",
"main": "main_node.js",
"author": "",
"license": "ISC",

File diff suppressed because it is too large Load Diff

View File

@ -26,6 +26,7 @@
"register-commands": "pm-cli register-commands --config=salamand.json --group=kbot",
"test": "vitest run",
"test:basic": "vitest run tests/unit/basic.test.ts",
"test:transcribe": "vitest run tests/unit/transcribe/transcribe.test.ts",
"test:math": "vitest run tests/unit/math.test.ts",
"test:format": "vitest run tests/unit/format.test.ts",
"test:options-glob": "vitest run tests/unit/options-glob.test.ts",
@ -118,6 +119,7 @@
"@vitest/coverage-v8": "^2.1.8",
"@vitest/ui": "2.1.9",
"eslint": "^8.57.1",
"nexe": "5.0.0-beta.4",
"rimraf": "6.0.1",
"ts-json-schema-generator": "^2.3.0",
"ts-loader": "9.5.1",

View File

@ -4,5 +4,11 @@
"command": "kbot-d",
"args": "modify --prompt=\"&{POLYMECH-ROOT}/nordin-exe/templates/product-description.md\" --alt=true --logLevel=3 --include=\"$(FullName)\" --dst=\"&{SRC_DIR}/&{SRC_NAME}_description.md\" --mode=completion",
"description": "Generate product description"
},
"transcribe": {
"name": "Transcribe Audio",
"command": "kbot",
"args": "transcribe --include=\"$(FullName)\" --dst=\"&{SRC_DIR}/&{SRC_NAME}.md\"",
"description": "Transcribe audio file"
}
}

View File

@ -0,0 +1,94 @@
// nexe.js - Compile pm-media to Windows executable using nexe Node.js API
import { compile } from 'nexe';
import path from 'path';
import fs from 'fs';
// Build a standalone Windows executable from the bundled CJS entry point
// using nexe. One-shot script: exits the process with code 1 when the
// bundle is missing or compilation fails.
async function buildExecutable() {
    const outputDir = './dist/win-64';
    const outputFile = 'kbot.exe';
    // Bundled CommonJS entry produced beforehand (see build script: webpack
    // output copied to main_node.cjs).
    const entryPoint = './dist/main_node.cjs';
    // nexe build/cache directory kept outside the package tree so the
    // from-source Node build can be reused across packages.
    const nexeTemp = '../../nexe-24';
    const nodeVersion = '24.5.0';
    // Ensure output directory exists
    if (!fs.existsSync(outputDir)) {
        fs.mkdirSync(outputDir, { recursive: true });
        console.log(`📁 Created output directory: ${outputDir}`);
    }
    // Ensure nexe temp directory exists
    if (!fs.existsSync(nexeTemp)) {
        fs.mkdirSync(nexeTemp, { recursive: true });
        console.log(`📁 Created temp directory: ${nexeTemp}`);
    }
    // Check if entry point exists; fail fast with a hint instead of letting
    // nexe error out later.
    if (!fs.existsSync(entryPoint)) {
        console.log(`❌ Entry point ${entryPoint} not found. Please run 'npm run build' first.`);
        process.exit(1);
    }
    const outputPath = path.join(outputDir, outputFile);
    console.log('📦 Compiling with nexe...');
    console.log(`   Entry: ${entryPoint}`);
    console.log(`   Output: ${outputPath}`);
    console.log(`   Temp: ${nexeTemp}`);
    console.log(`   Target: windows-x64-${nodeVersion}`);
    try {
        await compile({
            input: entryPoint,
            output: outputPath,
            target: `windows-x64-${nodeVersion}`,
            build: true, // Build from source for native modules like sharp
            temp: nexeTemp,
            clean: false,
            // NOTE(review): name says 'pm-media' but this script emits kbot.exe —
            // looks copied from the pm-media build script; confirm intended name.
            name: 'pm-media',
            configure: ['--with-intl=full-icu'], // Full ICU support
            make: ['-j4'], // Parallel build
            loglevel: 'verbose',
            // Resources - include any additional files if needed
            resources: [
                // Add any resource patterns here if needed
                // './assets/**/*'
            ],
            patches: [
                // Patch for better native module support
                async (compiler, next) => {
                    // This patch helps with native modules like sharp:
                    // it wraps process.dlopen in the embedded Node bootstrap.
                    await compiler.replaceInFileAsync(
                        'lib/internal/bootstrap/pre_execution.js',
                        'process.dlopen = function(',
                        `
                        // Nexe patch for native modules
                        const originalDlopen = process.dlopen;
                        process.dlopen = function(`
                    );
                    return next();
                }
            ]
        });
        console.log(`✅ Successfully compiled to ${outputPath}`);
        // Show file size
        if (fs.existsSync(outputPath)) {
            const stats = fs.statSync(outputPath);
            const fileSizeInMB = (stats.size / (1024 * 1024)).toFixed(2);
            console.log(`📊 Executable size: ${fileSizeInMB} MB`);
        }
        console.log('🎉 Build complete!');
    } catch (error) {
        console.error('❌ Compilation failed:', error.message);
        if (error.stack) {
            console.error(error.stack);
        }
        process.exit(1);
    }
}
// Run the build
buildExecutable().catch(console.error);

View File

@ -0,0 +1,5 @@
# Bundle the CLI with webpack, then compile it into a standalone
# Windows executable via nexe (see scripts/nexe.js).
npm run webpack
# nexe expects a CommonJS entry point; copy the webpack output to .cjs.
cp ./dist/main_node.js ./dist/main_node.cjs
node scripts/nexe.js
#mkdir -p dist/win-64/dist
#cp dist/win-64/*.wasm dist/win-64/dist/

View File

@ -0,0 +1,137 @@
import * as path from 'node:path'
import { isString, isArray } from '@polymech/core/primitives'
import { hasMagic } from 'glob'
import { sync as exists } from '@polymech/fs/exists'
import { forward_slash, resolve, pathInfoEx } from '@polymech/commons'
import { IKBotTask } from '@polymech/ai-tools'
import { OptionsSchema } from '../zod_schema.js'
import { transcribe } from '../lib/transcribe.js'
import { isWebUrl } from '../glob.js'
import { default_sort } from './run.js'
import { getLogger } from '../index.js'
import { variables } from '../variables.js'
/**
 * Zod schema for the `transcribe` command: the shared kbot options schema
 * narrowed to the keys this command understands, while still allowing
 * unknown keys through (`passthrough`).
 */
export const TranscribeOptionsSchema = () => {
    const transcribeKeys = {
        include: true,
        dst: true,
        api_key: true,
        model: true,
        router: true,
        logLevel: true,
        config: true,
        baseURL: true,
        alt: true,
        variables: true,
        profile: true,
        env: true,
    } as const
    return OptionsSchema().pick(transcribeKeys).passthrough()
}
/**
 * Split a raw CLI argument into one or more path-like strings.
 *
 * A value that is already usable as-is (a web URL, an existing file, or a
 * glob pattern) is returned unchanged as a single-element array. Otherwise
 * the string is sliced at every position that looks like the start of a new
 * path (Windows drive prefix, forward slash, or http(s) scheme), so several
 * concatenated paths can be recovered from one argument.
 *
 * @param input - Raw include value from the command line.
 * @returns The extracted path fragments; empty when nothing path-like is found.
 */
function extractPaths(input: string): string[] {
    // Already a single usable token — same check order as short-circuiting.
    if (isWebUrl(input) || exists(path.resolve(resolve(input))) || hasMagic(input)) {
        return [input]
    }
    // Collect every index where a new path appears to begin.
    const startPattern = /([A-Za-z]:\\)|\/|(https?:\/\/)/g;
    const starts: number[] = [];
    for (let m = startPattern.exec(input); m !== null; m = startPattern.exec(input)) {
        starts.push(m.index);
    }
    if (starts.length === 0) {
        return [];
    }
    // Slice between consecutive start indices (the last slice runs to the
    // end of the input), dropping fragments that trim down to nothing.
    return starts
        .map((start, i) => {
            const end = i + 1 < starts.length ? starts[i + 1] : input.length;
            return input.substring(start, end).trim();
        })
        .filter((fragment) => fragment.length > 0);
}
/**
 * Flatten an array of arrays one level deep.
 *
 * @param arrays - The nested arrays to flatten.
 * @returns A new array with the elements of every inner array, in order.
 */
function flattenArrays<T>(arrays: T[][]): T[] {
    // Array.prototype.flat() is the idiomatic one-level flatten and avoids
    // the repeated copying of a reduce + concat chain.
    return arrays.flat();
}
/**
 * CLI entry point for the `transcribe` command.
 *
 * Normalizes `opts.include` (string → array, concatenated/globbed values
 * expanded), resolves the matching files, and runs `transcribe` once per
 * file with per-file template variables (SRC_PATH, SRC_NAME, SRC_EXT,
 * SRC_DIR, CWD) available for `--dst` substitution.
 *
 * @param opts - Task options; `logger`, `include`, `dst`, and `variables`
 *               are read/mutated in place.
 */
export const transcribeCommand = async (opts: IKBotTask) => {
    opts.logger = getLogger(opts)
    if (opts.include) {
        // Accept a single string include and normalize to an array.
        if (isString(opts.include)) {
            opts.include = [opts.include]
        }
        if (isArray(opts.include)) {
            // Globs and URLs are kept verbatim; plain values may contain
            // several concatenated paths and are split via extractPaths.
            const specialPatterns = opts.include.filter((p: string) => hasMagic(p) || isWebUrl(p))
            const normalPaths = opts.include.filter((p: string) => !hasMagic(p) && !isWebUrl(p))
            const processedPaths = flattenArrays(normalPaths.map(extractPaths))
            opts.include = [...specialPatterns, ...processedPaths]
        }
    } else {
        opts.include = []
    }
    let files: string[] = []
    for (const includePath of opts.include) {
        if (hasMagic(includePath)) {
            // Expand glob patterns to absolute file paths, then apply the
            // command's default sort order.
            const info = pathInfoEx(forward_slash(path.resolve(resolve(includePath))), false, {
                absolute: true,
            })
            files.push(...default_sort(info.FILES))
        } else if (exists(includePath)) {
            files.push(includePath)
        }
        // Paths that neither glob-match nor exist are silently dropped;
        // the aggregate warning below covers the all-empty case.
    }
    if (files.length === 0) {
        opts.logger.warn(`No files found for --include patterns: ${opts.include.join(', ')}`)
        return
    }
    opts.logger.info(`Found ${files.length} files to transcribe.`)
    for (const file of files) {
        const fileInfo = path.parse(file)
        const CWD = process.cwd()
        // Per-file variables; explicit opts.variables win over the global
        // set, and the SRC_* / CWD entries win over both.
        const current_variables = {
            ...variables(opts),
            ...opts.variables,
            SRC_PATH: file,
            SRC_NAME: fileInfo.name,
            SRC_EXT: fileInfo.ext,
            SRC_DIR: fileInfo.dir,
            CWD: CWD
        }
        // Shallow per-file copy so dst/include mutations below don't leak
        // into the next iteration.
        const itemOpts: IKBotTask = {
            ...opts,
            include: [file],
            variables: current_variables
        };
        if (!itemOpts.dst) {
            // Default output template: a .md file next to the source file.
            itemOpts.dst = '${SRC_DIR}/${SRC_NAME}.md';
        }
        // NOTE(review): assumes resolve(dst, alt, variables) performs the
        // ${VAR} template substitution — confirm against @polymech/commons.
        itemOpts.dst = path.resolve(resolve(itemOpts.dst, itemOpts.alt, itemOpts.variables))
        opts.logger.info(`Transcribing ${file}...`)
        if(itemOpts.dst) {
            opts.logger.info(`Output will be saved to ${itemOpts.dst}`)
        }
        await transcribe(itemOpts)
    }
}

View File

@ -0,0 +1,242 @@
import * as path from 'path'
import * as fs from 'fs'
import { OpenAI, toFile } from "openai"
import { isString, isObject } from '@plastichub/core/primitives'
import { sync as write } from '@plastichub/fs/write'
import { sync as read } from '@plastichub/fs/read'
import { sync as exists } from '@plastichub/fs/exists'
import { resolve } from '@plastichub/osr-commons'
import { dumpAsScript, IOptions, logger, toImages } from '../..'
import { Typescript, Commons, Markdown, Documents, Rust } from './system'
import { Filters } from '../filters'
import { parse } from './options'
import { web_prompt as eprompt } from '../ui/electron'
import { Filter } from './types'
import { deepmerge as merge } from 'deepmerge-ts'
import { Tool, ToolInterface } from './tool'
/**
 * Best-effort deep merge of `source` into the JSON document stored at `target`.
 *
 * @param target - path of an existing JSON file to merge into
 * @param source - new data; a JSON string or an already-parsed value
 * @returns the merged document as pretty-printed JSON; on parse failure the
 *          untouched `source`, on merge failure the file's current content
 */
export const deepMerge = async (target: string, source: any) => {
    if (!isString(target) || !source) {
        logger.error(`Invalid deepmerge parameters:`, target, source)
        return source
    }
    // Load the current document; fall back to an empty collection when the
    // file is missing or unreadable.
    const existing = (read(target, 'json') as any) || []
    let incoming = source
    try {
        if (isString(incoming)) {
            incoming = JSON.parse(incoming)
        }
    } catch (e) {
        logger.error('Error parsing completion:', e)
        return source
    }
    try {
        const combined = merge(existing, incoming)
        return JSON.stringify(combined, null, 2)
    } catch (error) {
        logger.error('Error merging completion:', error)
    }
    return existing;
}
// Registry of merge strategies selectable by name via opts.append (used in onCompletion).
export const mergers = { deepMerge }
export const onCompletion = async (query: string, ret: string, opts: IOptions) => {
if (!isString(ret)) {
logger.warn(`Invalid response :${query}`)
return
}
const filters = (opts.filters as string).split(',')
opts.filters = []
filters.forEach((f) => {
if (Filters[f]) {
((opts.filters) as string[]).push(Filters[f])
}
})
if (opts.filters) {
(opts.filters as Filter[]).forEach((f) => { ret = f(ret) })
}
if (opts.append && mergers[opts.append] && opts.dst) {
ret = await mergers[opts.append](opts.dst, ret)
}
if (opts.dst) {
let header = `${opts.showPrompt ? `// ${opts.query}` : ''}\n`
let content = `${header}${ret}`
write(opts.dst, content)
}else{
process.stdout.write(ret)
}
return ret
}
/**
 * Upload a local file to OpenAI file storage.
 *
 * @param client - authenticated OpenAI client
 * @param filePath - path of the file to upload
 * @param purpose - OpenAI upload purpose (defaults to 'assistants')
 * @returns the created file object returned by the OpenAI API
 */
export const createOpenAIFile = async (client: OpenAI, filePath: string, purpose: string = 'assistants') => {
    const stream = fs.createReadStream(filePath)
    return client.files.create({ file: stream, purpose: purpose as any })
}
/**
 * Query OpenAI via the (beta) Assistants API with file_search attachments,
 * optionally collecting the prompt interactively through an Electron UI.
 *
 * @param api_key - OpenAI API key
 * @param options - task options; reads gui, prompts, query, dst, files, system, debug
 * @returns a promise resolving with the assistant's final message text
 */
export const queryEx = async (api_key: string, options: IOptions) => {
    let ui_opts = null
    let ui_opts_variables = {}
    // Optionally gather query/target interactively through the Electron prompt.
    if (options.gui === 'electron') {
        const promptsFile = path.resolve(resolve(options.prompts))
        const prompts = JSON.stringify(read(promptsFile, 'json') || [])
        ui_opts = await eprompt(options.query, options.dst, { ...ui_opts_variables, PROMPTS: prompts, MODELS: [] })
    }
    options = parse(options)
    // Values entered in the UI override the CLI-provided options.
    if (ui_opts) {
        if (ui_opts.files && ui_opts.files.length > 0) {
            options.files = ui_opts.files
        }
        if (ui_opts.textAreaValue !== options.query) {
            options.query = ui_opts.textAreaValue
        }
        if (ui_opts.target.length && ui_opts.target !== options.dst) {
            options.dst = ui_opts.target
        }
    }
    const client = new OpenAI({ apiKey: api_key })
    let messages: any[] = []
    // Default system instructions, replaceable by a JSON file via options.system.
    // NOTE(review): `messages` is assembled below but never passed to the
    // assistant or thread — confirm whether this is dead code or a missing wiring.
    let defaults = [...Typescript(), ...Documents()]
    if (options.system && exists(options.system)) {
        options.debug && logger.debug('Reading system instructions from', options.system)
        try {
            const system = read(options.system, 'json')
            if (system) {
                messages = [...system as []]
            }
        } catch (error) {
            logger.error('Error reading system instructions', error)
            messages = defaults
        }
    } else {
        messages = defaults
    }
    // Upload every input file and attach it for the file_search tool.
    const attachments = await Promise.all((options.files as string[]).map(async (file: string) => {
        const file_id = await createOpenAIFile(client, file)
        return {
            file_id: file_id.id,
            tools: [{ type: "file_search" }]
        }
    }))
    // NOTE(review): a fresh assistant and thread are created on every call and
    // never deleted — confirm whether cleanup happens elsewhere.
    const assistant = await client.beta.assistants.create({
        name: "Documents Assistant",
        instructions: "You are an expert data analyst.",
        model: "gpt-4o",
        tools: [{ type: "file_search" }],
    })
    const thread = await client.beta.threads.create({
        messages: [
            {
                role: "user",
                content: options.query,
                attachments
            } as any
        ]
    })
    // Stream the run; resolve with the final assistant message text.
    return new Promise((resolve, reject) => {
        try {
            const stream = client.beta.threads.runs
                .stream(thread.id, {
                    assistant_id: assistant.id,
                })
                .on("textCreated", () => console.log("assistant >"))
                .on("toolCallCreated", (event) => console.log("assistant " + event.type))
                .on("messageDone", async (event) => {
                    if (event.content[0].type === "text") {
                        const { text } = event.content[0];
                        const { annotations } = text;
                        const citations: string[] = [];
                        let index = 0;
                        // Citation expansion is currently disabled (kept for reference).
                        /*
                        for (let annotation of annotations) {
                            text.value = text.value.replace(annotation.text, "[" + index + "]");
                            const { file_citation } = annotation;
                            if (file_citation) {
                                const citedFile = await openai.files.retrieve(file_citation.file_id);
                                citations.push("[" + index + "]" + citedFile.filename);
                            }
                            index++;
                        }*/
                        logger.debug('OpenAI response:', text.value)
                        resolve(text.value)
                    }
                })
            return stream
        } catch (error) {
            reject(error)
        }
    })
}
/**
 * Send a chat-completion request to OpenAI and post-process the result
 * via onCompletion (filters, optional merge, write to options.dst).
 *
 * @param query - the user prompt
 * @param api_key - OpenAI API key
 * @param dst - destination path; NOTE(review): unused here — output is driven
 *              by options.dst inside onCompletion; confirm and consider removing
 * @param options - task options; reads system, files, filesInfo, model, and
 *                  the onCompletion-related fields
 * @returns the post-processed completion text, or undefined on empty response
 */
export const query = async (query: string, api_key: string, dst: string, options: IOptions) => {
    const client = new OpenAI({ apiKey: api_key })
    let messages: any[] = []
    // Default system instructions, replaceable by a JSON file via options.system.
    let defaults = [...Typescript(), ...Markdown(), ...Commons(), ...Rust()]
    if (options.system && exists(options.system)) {
        logger.debug('Reading system instructions from', options.system)
        try {
            const system = read(options.system, 'json')
            if (system) {
                messages = [...system as []]
            }
        } catch (error) {
            logger.error('Error reading system instructions', error)
            messages = defaults
        }
    } else {
        messages = defaults
    }
    const requestMessage: any = {
        role: "user",
        content: query
    }
    messages.push(requestMessage)
    // Attach image files as additional user messages.
    // NOTE(review): assumes options.filesInfo is populated whenever options.files
    // is set — options.filesInfo.FILES would throw otherwise; confirm upstream.
    if (options.files && options.filesInfo.FILES) {
        const images = toImages(options.filesInfo.FILES).map((image) => {
            return {
                role: "user",
                content: [{ ...image }]
            }
        })
        messages = [...messages, ...images]
    }
    // Tool definition is currently unused (the `tools` option below is commented out).
    // NOTE(review): schema is inconsistent — it declares property `order_id` but
    // requires `dir`; fix before enabling.
    const tools = [
        {
            type: "function",
            function: {
                name: "list_files",
                description: "List files in a given directory.",
                parameters: {
                    type: "object",
                    properties: {
                        order_id: {
                            type: "string",
                            description: "The directory to list files in.",
                        },
                    },
                    required: ["dir"],
                    additionalProperties: false
                }
            }
        }
    ]
    const completion = await client.chat.completions.create({
        model: options.model || "gpt-4o",
        messages: messages,
        //tools: tools as any
    })
    if (completion.choices.length === 0) {
        logger.error('OpenAI response is empty');
        return
    }
    let ret = completion.choices[0].message.content
    // Apply filters/merging and write or print the result.
    ret = await onCompletion(query, ret, options)
    dumpAsScript(options)
    return ret
}

View File

@ -0,0 +1,64 @@
import * as fs from 'fs'
import { toFile } from "openai"
import { sync as exists } from '@polymech/fs/exists'
import { sync as write } from '@polymech/fs/write'
import { IKBotTask } from '@polymech/ai-tools'
import { createClient } from '../client.js'
/**
 * Read a file fully into memory.
 *
 * @param path - filesystem path to read
 * @returns the file contents as a Buffer, or null when reading fails
 */
const createBuffer = (path: string): Buffer | null => {
    try {
        return fs.readFileSync(path)
    } catch (error) {
        console.error('Error creating buffer:', error);
        return null;
    }
}
/**
 * Transcribe a single audio file with OpenAI Whisper.
 *
 * Reads the first path from options.include, uploads it as mp3, and writes the
 * resulting text to options.dst (or stdout when no destination is set).
 *
 * @param options - task options; reads include, dst, response_format, logger
 * @returns the raw transcription response, or undefined on any failure
 */
export const transcribe = async (options: IKBotTask) => {
    const client = createClient(options)
    if (!client) {
        options.logger.error('Failed to create client')
        return
    }
    if (!options.include || options.include.length === 0) {
        options.logger.error('No source file provided via --include')
        return;
    }
    const sourceFile = options.include[0];
    if (!exists(sourceFile)) {
        options.logger.error('Source file does not exist', sourceFile)
        return;
    }
    // Fail early when the file cannot be read; previously a null buffer was
    // passed straight into toFile().
    const buffer = createBuffer(sourceFile)
    if (!buffer) {
        options.logger.error('Error reading source file', sourceFile)
        return;
    }
    // NOTE(review): the upload name/type are hard-coded to mp3 regardless of the
    // actual source extension — confirm Whisper accepts this for other formats.
    const file = await toFile(buffer, 'audio.mp3', { type: 'audio/mpeg' });
    if (!file) {
        options.logger.error('Error converting source to file')
        return;
    }
    const completion: any = await client.audio.transcriptions.create({
        model: 'whisper-1',
        file: file,
        response_format: (options as any).response_format || "verbose_json",
    })
    if (!completion) {
        options.logger.error('OpenAI response is empty')
        return;
    }
    const text_content = completion.text || '';
    if (options.dst) {
        write(options.dst, text_content)
    } else {
        process.stdout.write(text_content)
    }
    // options.logger.debug('OpenAI Transcribe response:', completion)
    return completion
}

View File

@ -3,8 +3,8 @@ import yargs from 'yargs'
import { hideBin } from 'yargs/helpers'
import { toYargs } from '@polymech/commons'
import { createLogger } from '@polymech/log'
import { OptionsSchema, schemas, types } from './zod_schema.js'
import { OptionsSchema, schemas, types } from './zod_schema.js'
import { IKBotTask } from '@polymech/ai-tools'
import helpCommand from './commands/help.js'
@ -14,6 +14,8 @@ import { build } from './commands/build.js'
import { fetch } from './commands/fetch.js'
import { run } from './commands/run.js'
import { transcribeCommand, TranscribeOptionsSchema } from './commands/transcribe.js'
export const logger: any = createLogger('llm-tools')
const modify = async (argv: IKBotTask) => await run(argv as IKBotTask)
@ -46,6 +48,12 @@ yargs(hideBin(process.argv))
(yargs) => toYargs(yargs, OptionsSchema(), yargOptions),
modify
)
.command(
'transcribe',
'Transcribe audio files',
(yargs) => toYargs(yargs, TranscribeOptionsSchema(), yargOptions),
transcribeCommand
)
.command(
'types',
'Generate types',

View File

@ -1,18 +1,12 @@
import * as path from 'node:path'
import * as fs from 'node:fs'
import { sync as read } from '@polymech/fs/read'
// import { sync as dir } from '@polymech/fs/dir' // Moved to glob.ts if only used there
// import { createItem as toNode } from '@polymech/fs/inspect' // Moved to glob.ts
import { sync as exists } from '@polymech/fs/exists' // Still needed for vectorize
import { isFile, forward_slash, resolve as resolvePath } from '@polymech/commons' // Renamed resolve to resolvePath to avoid conflict
import { logger } from './index.js'
import { lookup } from 'mime-types'
// import { globSync } from 'glob' // Moved to glob.ts
// import { EXCLUDE_GLOB, MAX_FILE_SIZE } from './constants.js' // Moved to glob.ts
import { defaultMimeRegistry, IHandlerResult } from './mime-handlers.js'
import { ChatCompletionContentPartImage } from 'openai/resources/index.mjs'
import { IKBotTask, ICollector } from '@polymech/ai-tools'
import { IKBotTask } from '@polymech/ai-tools'
import { supported } from './commands/run-assistant.js'
import { handleWebUrl } from './http.js'
import { glob } from './glob.js' // Import glob from glob.ts

View File

@ -0,0 +1,38 @@
{
"task": "transcribe",
"language": "english",
"duration": 6.03000020980835,
"text": "1 2 3 4 5 6 7 8 9 10 11",
"segments": [
{
"id": 0,
"seek": 0,
"start": 0,
"end": 7,
"text": " 1 2 3 4 5 6 7 8 9 10 11",
"tokens": [
50364,
502,
568,
805,
1017,
1025,
1386,
1614,
1649,
1722,
1266,
2975,
50714
],
"temperature": 0,
"avg_logprob": -0.46747010946273804,
"compression_ratio": 0.7419354915618896,
"no_speech_prob": 0.13440847396850586
}
],
"usage": {
"type": "duration",
"seconds": 7
}
}

View File

@ -0,0 +1 @@
The lazy fox jumps over the cat.

Binary file not shown.

View File

@ -0,0 +1,43 @@
import { describe, it, expect, afterAll, beforeAll } from 'vitest'
import * as path from 'node:path'
import * as fs from 'node:fs'
import { sync as exists } from "@polymech/fs/exists"
import { sync as read } from "@polymech/fs/read"
import { transcribeCommand } from '../../../dist-in/commands/transcribe.js'
import { IKBotTask } from '@polymech/ai-tools'
// Location of the test fixtures and the audio sample exercised below.
const TEST_DATA_DIR = './tests/unit/transcribe'
const TEST_MP3 = path.join(TEST_DATA_DIR, 'test.mp3')
// Generous timeout: the test performs a real network call to the transcription API.
const TEST_TIMEOUT = 30000 // 30 seconds
describe('Transcribe Command', () => {
    // transcribeCommand defaults its output to <SRC_DIR>/<SRC_NAME>.md next to the source.
    const defaultOutputFile = path.resolve(path.join(TEST_DATA_DIR, 'test.md'))
    beforeAll(() => {
        // Remove stale output so the assertion below proves a fresh write.
        if (fs.existsSync(defaultOutputFile)) {
            fs.unlinkSync(defaultOutputFile)
        }
    })
    it('should transcribe an audio file and save the output to a default markdown file', async () => {
        const options: IKBotTask = {
            include: [TEST_MP3],
            router: 'openai',
            logLevel: 2,
        }
        await transcribeCommand(options)
        // NOTE(review): assumes exists() returns the string 'file' for regular
        // files — confirm against @polymech/fs.
        expect(exists(defaultOutputFile)).toBe('file')
        const result = read(defaultOutputFile, 'text') as string
        expect(result).toBeDefined()
        // Keyword checks rather than exact match: transcription output may vary
        // in casing/punctuation.
        const lowerCaseResult = result.toLowerCase()
        expect(lowerCaseResult).toContain("jump")
        expect(lowerCaseResult).toContain("fox")
        expect(lowerCaseResult).toContain("cat")
    }, TEST_TIMEOUT)
})

View File

@ -0,0 +1,35 @@
{
"task": "transcribe",
"language": "english",
"duration": 5.130000114440918,
"text": "The lazy fox jumps over the cat.",
"segments": [
{
"id": 0,
"seek": 0,
"start": 0,
"end": 5,
"text": " The lazy fox jumps over the cat.",
"tokens": [
50364,
440,
14847,
21026,
16704,
670,
264,
3857,
13,
50614
],
"temperature": 0,
"avg_logprob": -0.40687674283981323,
"compression_ratio": 0.8205128312110901,
"no_speech_prob": 0.03593870624899864
}
],
"usage": {
"type": "duration",
"seconds": 6
}
}