Maintenance love :)
This commit is contained in:
parent 20608319d8
commit 8909093b91
@ -10,18 +10,25 @@ export function createHandlers(options) {
        new TextHandler(),
    ];
}
import { isFile } from '@polymech/commons';
export async function detectAndHandle(content, options) {
    const handlers = createHandlers(options);
    // Check if content is a file path
    const contentStr = content.toString();
    if (exists(contentStr)) {
        const filePath = path.resolve(contentStr);
        for (const handler of handlers) {
            if (await handler.canHandle(filePath, true)) {
                return handler.handle(filePath, true);
    const contentStr = content.toString().trim();
    try {
        if (isFile(contentStr) && exists(contentStr)) {
            const filePath = path.resolve(contentStr);
            for (const handler of handlers) {
                if (await handler.canHandle(filePath, true)) {
                    return handler.handle(filePath, true);
                }
            }
        }
    }
    catch (e) {
        // options.logger?.warn(`Malformed path detected. Treating as content. Error: ${e.message}`);
        // If it's not a valid path, it will throw an error, so we just continue
        console.error(`Malformed path detected. Treating as content. Error: ${e.message}`);
    }
    // Handle as raw content
    for (const handler of handlers) {
        if (await handler.canHandle(content, false)) {
@ -30,4 +37,4 @@ export async function detectAndHandle(content, options) {
    }
    throw new Error('No suitable handler found for the input content');
}
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvY29tbWFuZHMvaGFuZGxlcnMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUE7QUFFakMsT0FBTyxFQUFFLFdBQVcsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQy9DLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNqRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFDakQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUlwRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE9BQXFCO0lBQ2xELE9BQU87UUFDTCxJQUFJLFlBQVksQ0FBQyxPQUFPLENBQUM7UUFDekIsSUFBSSxZQUFZLENBQUMsT0FBTyxDQUFDO1FBQ3pCLElBQUksV0FBVyxFQUFFO0tBQ2xCLENBQUE7QUFDSCxDQUFDO0FBQ0QsTUFBTSxDQUFDLEtBQUssVUFBVSxlQUFlLENBQUMsT0FBd0IsRUFBRSxPQUFxQjtJQUNuRixNQUFNLFFBQVEsR0FBRyxjQUFjLENBQUMsT0FBTyxDQUFDLENBQUE7SUFDeEMsa0NBQWtDO0lBQ2xDLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxRQUFRLEVBQUUsQ0FBQztJQUN0QyxJQUFJLE1BQU0sQ0FBQyxVQUFVLENBQUMsRUFBRSxDQUFDO1FBQ3ZCLE1BQU0sUUFBUSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsVUFBVSxDQUFDLENBQUM7UUFDMUMsS0FBSyxNQUFNLE9BQU8sSUFBSSxRQUFRLEVBQUUsQ0FBQztZQUMvQixJQUFJLE1BQU0sT0FBTyxDQUFDLFNBQVMsQ0FBQyxRQUFRLEVBQUUsSUFBSSxDQUFDLEVBQUUsQ0FBQztnQkFDNUMsT0FBTyxPQUFPLENBQUMsTUFBTSxDQUFDLFFBQVEsRUFBRSxJQUFJLENBQUMsQ0FBQztZQUN4QyxDQUFDO1FBQ0gsQ0FBQztJQUNILENBQUM7SUFFRCx3QkFBd0I7SUFDeEIsS0FBSyxNQUFNLE9BQU8sSUFBSSxRQUFRLEVBQUUsQ0FBQztRQUMvQixJQUFJLE1BQU0sT0FBTyxDQUFDLFNBQVMsQ0FBQyxPQUFPLEVBQUUsS0FBSyxDQUFDLEVBQUUsQ0FBQztZQUM1QyxPQUFPLE9BQU8sQ0FBQyxNQUFNLENBQUMsT0FBTyxFQUFFLEtBQUssQ0FBQyxDQUFDO1FBQ3hDLENBQUM7SUFDSCxDQUFDO0lBQ0QsTUFBTSxJQUFJLEtBQUssQ0FBQyxpREFBaUQsQ0FBQyxDQUFDO0FBQ3JFLENBQUMifQ==
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvY29tbWFuZHMvaGFuZGxlcnMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUE7QUFFakMsT0FBTyxFQUFFLFdBQVcsRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQy9DLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNqRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFDakQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUlwRCxNQUFNLFVBQVUsY0FBYyxDQUFDLE9BQXFCO0lBQ2xELE9BQU87UUFDTCxJQUFJLFlBQVksQ0FBQyxPQUFPLENBQUM7UUFDekIsSUFBSSxZQUFZLENBQUMsT0FBTyxDQUFDO1FBQ3pCLElBQUksV0FBVyxFQUFFO0tBQ2xCLENBQUE7QUFDSCxDQUFDO0FBQ0QsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLG1CQUFtQixDQUFDO0FBRTNDLE1BQU0sQ0FBQyxLQUFLLFVBQVUsZUFBZSxDQUFDLE9BQXdCLEVBQUUsT0FBcUI7SUFDbkYsTUFBTSxRQUFRLEdBQUcsY0FBYyxDQUFDLE9BQU8sQ0FBQyxDQUFBO0lBQ3hDLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxRQUFRLEVBQUUsQ0FBQyxJQUFJLEVBQUUsQ0FBQztJQUM3QyxJQUFJLENBQUM7UUFDSCxJQUFJLE1BQU0sQ0FBQyxVQUFVLENBQUMsSUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztZQUM3QyxNQUFNLFFBQVEsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLFVBQVUsQ0FBQyxDQUFDO1lBQzFDLEtBQUssTUFBTSxPQUFPLElBQUksUUFBUSxFQUFFLENBQUM7Z0JBQy9CLElBQUksTUFBTSxPQUFPLENBQUMsU0FBUyxDQUFDLFFBQVEsRUFBRSxJQUFJLENBQUMsRUFBRSxDQUFDO29CQUM1QyxPQUFPLE9BQU8sQ0FBQyxNQUFNLENBQUMsUUFBUSxFQUFFLElBQUksQ0FBQyxDQUFDO2dCQUN4QyxDQUFDO1lBQ0gsQ0FBQztRQUNILENBQUM7SUFDSCxDQUFDO0lBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQztRQUNYLDZGQUE2RjtRQUM3Rix3RUFBd0U7UUFDeEUsT0FBTyxDQUFDLEtBQUssQ0FBQyx3REFBd0QsQ0FBQyxDQUFDLE9BQU8sRUFBRSxDQUFDLENBQUM7SUFDckYsQ0FBQztJQUVELHdCQUF3QjtJQUN4QixLQUFLLE1BQU0sT0FBTyxJQUFJLFFBQVEsRUFBRSxDQUFDO1FBQy9CLElBQUksTUFBTSxPQUFPLENBQUMsU0FBUyxDQUFDLE9BQU8sRUFBRSxLQUFLLENBQUMsRUFBRSxDQUFDO1lBQzVDLE9BQU8sT0FBTyxDQUFDLE1BQU0sQ0FBQyxPQUFPLEVBQUUsS0FBSyxDQUFDLENBQUM7UUFDeEMsQ0FBQztJQUNILENBQUM7SUFFRCxNQUFNLElBQUksS0FBSyxDQUFDLGlEQUFpRCxDQUFDLENBQUM7QUFDckUsQ0FBQyJ9
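A minimal usage sketch of the revised detection flow above (the import path, options shape, and sample inputs are illustrative assumptions, not part of the commit):

    // Sketch only: assumes the compiled handlers entry point and a minimal options object.
    import { detectAndHandle } from './commands/handlers/index.js';

    const options = { logger: console };
    // A string that resolves to an existing file is dispatched with isPath = true;
    // malformed paths now fall through the try/catch and are treated as raw content.
    const fromFile = await detectAndHandle('./notes/input.md', options);
    const fromText = await detectAndHandle('Just some inline text', options);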
@ -19,6 +19,7 @@ export const onCompletion = async (result = "", options) => {
        writeOrAppend(dstPath, result, options.append);
        const action = options.append ? `${options.append} content to` : 'Wrote completion result to';
        options.logger?.debug(`${action} ${dstPath} : ${options.dst}`);
        console.log(`wrote ${action} ${dstPath} : ${options.dst}`);
    }
    else {
        marked.use(markedTerminal({
@ -48,4 +49,4 @@ export const runCompletion = async (client, params, options) => {
    result = await onCompletion(result, options);
    return result;
};
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLWNvbXBsZXRpb24uanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvcnVuLWNvbXBsZXRpb24udHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQ0EsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLFFBQVEsQ0FBQTtBQUMvQixPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0saUJBQWlCLENBQUE7QUFDaEQsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUE7QUFDakMsT0FBTyxFQUFFLE9BQU8sRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRzNDLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNqRCxPQUFPLEVBQUUsWUFBWSxFQUFVLE1BQU0sZUFBZSxDQUFBO0FBQ3BELE9BQU8sRUFBRSxTQUFTLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsYUFBYSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFHakQsTUFBTSxDQUFDLE1BQU0sWUFBWSxHQUFHLEtBQUssRUFBRSxTQUFjLEVBQUUsRUFBRSxPQUFrQixFQUFFLEVBQUU7SUFDekUsTUFBTSxHQUFHLFlBQVksQ0FBQyxNQUFNLEVBQUUsT0FBTyxDQUFDLE9BQW1CLElBQUksRUFBRSxDQUFDLENBQUE7SUFDaEUsSUFBSSxJQUFJLEdBQUcsU0FBUyxDQUFDLE9BQU8sQ0FBQyxDQUFBO0lBQzdCLElBQUksT0FBTyxDQUFDLEdBQUcsRUFBRSxDQUFDO1FBQ2hCLElBQUksR0FBRztZQUNMLEdBQUcsSUFBSTtZQUNQLEtBQUssRUFBRSxPQUFPLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxLQUFLLENBQUMsQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLGVBQWU7WUFDdkUsTUFBTSxFQUFFLE9BQU8sQ0FBQyxNQUFNLElBQUksZ0JBQWdCO1NBQzNDLENBQUE7UUFDRCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsR0FBRyxFQUFFLEtBQUssRUFBRSxJQUFJLENBQUMsQ0FBQyxDQUFBO1FBQy9ELGFBQWEsQ0FBQyxPQUFPLEVBQUUsTUFBTSxFQUFFLE9BQU8sQ0FBQyxNQUEwQixDQUFDLENBQUE7UUFDbEUsTUFBTSxNQUFNLEdBQUcsT0FBTyxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUMsR0FBRyxPQUFPLENBQUMsTUFBTSxhQUFhLENBQUMsQ0FBQyxDQUFDLDRCQUE0QixDQUFBO1FBQzdGLE9BQU8sQ0FBQyxNQUFNLEVBQUUsS0FBSyxDQUFDLEdBQUcsTUFBTSxJQUFJLE9BQU8sTUFBTSxPQUFPLENBQUMsR0FBRyxFQUFFLENBQUMsQ0FBQTtJQUNoRSxDQUFDO1NBQU0sQ0FBQztRQUNOLE1BQU0sQ0FBQyxHQUFHLENBQUMsY0FBYyxDQUFDO1lBQ3hCLEtBQUssRUFBRSxLQUFLO1NBQ2IsQ0FBQyxDQUFDLENBQUE7UUFDSCxNQUFNLE9BQU8sR0FBVyxNQUFNLENBQUMsTUFBTSxDQUFXLENBQUM7UUFDakQsT0FBTyxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLENBQUE7SUFDL0IsQ0FBQztJQUNELFlBQVksQ0FBQyxPQUFPLENBQUMsQ0FBQTtJQUNyQixrQkFBa0I7SUFDbEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsS0FBSyxFQUFFLE1BQWMsRUFBRSxNQUFXLEVBQUUsT0FBa0IsRUFBRSxFQUFFO0lBQ3JGLElBQUksT0FBTyxDQUFDLEdBQUcsRUFBRSxDQUFDO1FBQ2hCLE9BQU8sQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixDQUFDLENBQUE7UUFDbEQsT0FBTyxLQUFLLENBQUE7SUFDZCxDQUFDO0lBQ0QsTUFBTSxVQUFVLEdBQUcsTUFBTSxNQUFNLENBQUMsSUFBSSxDQUFDLFdBQVcsQ0FBQyxNQUFNLENBQUM7UUFDdEQsS0FBSyxFQUFFLE9BQU8sQ0FBQyxLQUFLO1FBQ3BCLFFBQVEsRUFBRSxNQUFNLENBQUMsUUFBUTtRQUN6QixlQUFlLEVBQUUsT0FBTyxDQUFDLE1BQWE7S0FDdkMsQ0FBQyxDQUFBO0lBQ0YsSUFBSSxDQUFDLFVBQVUsSUFBSSxDQUFDLFVBQVUsQ0FBQyxPQUFPLElBQUksQ0FBQyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsVUFBVSxDQUFDLE9BQU8sQ0FBQyxDQUFDLENBQUMsQ0FBQyxPQUFPLElBQUksQ0FBQyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxPQUFPLEVBQUUsQ0FBQztRQUM3SSxPQUFPLEVBQUUsQ0FBQTtJQUNYLENBQUM7SUFDRCxJQUFJLE1BQU0sR0FBRyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUE7SUFDbEQsTUFBTSxHQUFHLE1BQU0sWUFBWSxDQUFDLE1BQU0sRUFBRSxPQUFPLENBQUMsQ0FBQTtJQUM1QyxPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQSJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLWNvbXBsZXRpb24uanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbWFuZHMvcnVuLWNvbXBsZXRpb24udHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQ0EsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLFFBQVEsQ0FBQTtBQUMvQixPQUFPLEVBQUUsY0FBYyxFQUFFLE1BQU0saUJBQWlCLENBQUE7QUFDaEQsT0FBTyxLQUFLLElBQUksTUFBTSxXQUFXLENBQUE7QUFDakMsT0FBTyxFQUFFLE9BQU8sRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBRzNDLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxvQkFBb0IsQ0FBQTtBQUNqRCxPQUFPLEVBQUUsWUFBWSxFQUFVLE1BQU0sZUFBZSxDQUFBO0FBQ3BELE9BQU8sRUFBRSxTQUFTLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQTtBQUMzQyxPQUFPLEVBQUUsYUFBYSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFHakQsTUFBTSxDQUFDLE1BQU0sWUFBWSxHQUFHLEtBQUssRUFBRSxTQUFjLEVBQUUsRUFBRSxPQUFrQixFQUFFLEVBQUU7SUFDekUsTUFBTSxHQUFHLFlBQVksQ0FBQyxNQUFNLEVBQUUsT0FBTyxDQUFDLE9BQW1CLElBQUksRUFBRSxDQUFDLENBQUE7SUFDaEUsSUFBSSxJQUFJLEdBQUcsU0FBUyxDQUFDLE9BQU8sQ0FBQyxDQUFBO0lBQzdCLElBQUksT0FBTyxDQUFDLEdBQUcsRUFBRSxDQUFDO1FBQ2hCLElBQUksR0FBRztZQUNMLEdBQUcsSUFBSTtZQUNQLEtBQUssRUFBRSxPQUFPLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxLQUFLLENBQUMsQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLGVBQWU7WUFDdkUsTUFBTSxFQUFFLE9BQU8sQ0FBQyxNQUFNLElBQUksZ0JBQWdCO1NBQzNDLENBQUE7UUFDRCxNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUMsR0FBRyxFQUFFLEtBQUssRUFBRSxJQUFJLENBQUMsQ0FBQyxDQUFBO1FBQy9ELGFBQWEsQ0FBQyxPQUFPLEVBQUUsTUFBTSxFQUFFLE9BQU8sQ0FBQyxNQUEwQixDQUFDLENBQUE7UUFDbEUsTUFBTSxNQUFNLEdBQUcsT0FBTyxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUMsR0FBRyxPQUFPLENBQUMsTUFBTSxhQUFhLENBQUMsQ0FBQyxDQUFDLDRCQUE0QixDQUFBO1FBQzdGLE9BQU8sQ0FBQyxNQUFNLEVBQUUsS0FBSyxDQUFDLEdBQUcsTUFBTSxJQUFJLE9BQU8sTUFBTSxPQUFPLENBQUMsR0FBRyxFQUFFLENBQUMsQ0FBQTtRQUM5RCxPQUFPLENBQUMsR0FBRyxDQUFDLFNBQVMsTUFBTSxJQUFJLE9BQU8sTUFBTSxPQUFPLENBQUMsR0FBRyxFQUFFLENBQUMsQ0FBQTtJQUM1RCxDQUFDO1NBQU0sQ0FBQztRQUNOLE1BQU0sQ0FBQyxHQUFHLENBQUMsY0FBYyxDQUFDO1lBQ3hCLEtBQUssRUFBRSxLQUFLO1NBQ2IsQ0FBQyxDQUFDLENBQUE7UUFDSCxNQUFNLE9BQU8sR0FBVyxNQUFNLENBQUMsTUFBTSxDQUFXLENBQUM7UUFDakQsT0FBTyxDQUFDLE1BQU0sQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLENBQUE7SUFDL0IsQ0FBQztJQUNELFlBQVksQ0FBQyxPQUFPLENBQUMsQ0FBQTtJQUNyQixrQkFBa0I7SUFDbEIsT0FBTyxNQUFNLENBQUE7QUFDZixDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsS0FBSyxFQUFFLE1BQWMsRUFBRSxNQUFXLEVBQUUsT0FBa0IsRUFBRSxFQUFFO0lBQ3JGLElBQUksT0FBTyxDQUFDLEdBQUcsRUFBRSxDQUFDO1FBQ2hCLE9BQU8sQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLDZCQUE2QixDQUFDLENBQUE7UUFDbEQsT0FBTyxLQUFLLENBQUE7SUFDZCxDQUFDO0lBQ0QsTUFBTSxVQUFVLEdBQUcsTUFBTSxNQUFNLENBQUMsSUFBSSxDQUFDLFdBQVcsQ0FBQyxNQUFNLENBQUM7UUFDdEQsS0FBSyxFQUFFLE9BQU8sQ0FBQyxLQUFLO1FBQ3BCLFFBQVEsRUFBRSxNQUFNLENBQUMsUUFBUTtRQUN6QixlQUFlLEVBQUUsT0FBTyxDQUFDLE1BQWE7S0FDdkMsQ0FBQyxDQUFBO0lBQ0YsSUFBSSxDQUFDLFVBQVUsSUFBSSxDQUFDLFVBQVUsQ0FBQyxPQUFPLElBQUksQ0FBQyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsVUFBVSxDQUFDLE9BQU8sQ0FBQyxDQUFDLENBQUMsQ0FBQyxPQUFPLElBQUksQ0FBQyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxPQUFPLEVBQUUsQ0FBQztRQUM3SSxPQUFPLEVBQUUsQ0FBQTtJQUNYLENBQUM7SUFDRCxJQUFJLE1BQU0sR0FBRyxVQUFVLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxPQUFPLENBQUE7SUFDbEQsTUFBTSxHQUFHLE1BQU0sWUFBWSxDQUFDLE1BQU0sRUFBRSxPQUFPLENBQUMsQ0FBQTtJQUM1QyxPQUFPLE1BQU0sQ0FBQTtBQUNmLENBQUMsQ0FBQSJ9
File diff suppressed because one or more lines are too long
@ -1,5 +1,5 @@
|
||||
{
|
||||
"timestamp": 1754493028882,
|
||||
"timestamp": 1754597769369,
|
||||
"models": [
|
||||
{
|
||||
"id": "gpt-4-0613",
|
||||
@ -20,33 +20,33 @@
|
||||
"owned_by": "openai"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-deep-research-2025-06-26",
|
||||
"id": "gpt-5-nano",
|
||||
"object": "model",
|
||||
"created": 1750866121,
|
||||
"created": 1754426384,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "codex-mini-latest",
|
||||
"id": "gpt-5",
|
||||
"object": "model",
|
||||
"created": 1746673257,
|
||||
"created": 1754425777,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-realtime-preview-2025-06-03",
|
||||
"id": "gpt-5-mini-2025-08-07",
|
||||
"object": "model",
|
||||
"created": 1748907838,
|
||||
"created": 1754425867,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-audio-preview-2025-06-03",
|
||||
"id": "gpt-5-mini",
|
||||
"object": "model",
|
||||
"created": 1748908498,
|
||||
"created": 1754425928,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-deep-research",
|
||||
"id": "gpt-5-nano-2025-08-07",
|
||||
"object": "model",
|
||||
"created": 1749685485,
|
||||
"created": 1754426303,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
@ -361,12 +361,24 @@
|
||||
"created": 1742403959,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o3-2025-04-16",
|
||||
"object": "model",
|
||||
"created": 1744133301,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-2025-04-16",
|
||||
"object": "model",
|
||||
"created": 1744133506,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o3",
|
||||
"object": "model",
|
||||
"created": 1744225308,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini",
|
||||
"object": "model",
|
||||
@ -415,6 +427,48 @@
|
||||
"created": 1745517030,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "codex-mini-latest",
|
||||
"object": "model",
|
||||
"created": 1746673257,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-realtime-preview-2025-06-03",
|
||||
"object": "model",
|
||||
"created": 1748907838,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-audio-preview-2025-06-03",
|
||||
"object": "model",
|
||||
"created": 1748908498,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-deep-research",
|
||||
"object": "model",
|
||||
"created": 1749685485,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-deep-research-2025-06-26",
|
||||
"object": "model",
|
||||
"created": 1750866121,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-5-chat-latest",
|
||||
"object": "model",
|
||||
"created": 1754073306,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-5-2025-08-07",
|
||||
"object": "model",
|
||||
"created": 1754075360,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-16k",
|
||||
"object": "model",
|
||||
|
||||
File diff suppressed because it is too large
@ -2,11 +2,11 @@ export declare enum E_OPENAI_MODEL {
|
||||
MODEL_GPT_4_0613 = "gpt-4-0613",
|
||||
MODEL_GPT_4 = "gpt-4",
|
||||
MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26 = "o4-mini-deep-research-2025-06-26",
|
||||
MODEL_CODEX_MINI_LATEST = "codex-mini-latest",
|
||||
MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03 = "gpt-4o-realtime-preview-2025-06-03",
|
||||
MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03 = "gpt-4o-audio-preview-2025-06-03",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research",
|
||||
MODEL_GPT_5_NANO = "gpt-5-nano",
|
||||
MODEL_GPT_5 = "gpt-5",
|
||||
MODEL_GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07",
|
||||
MODEL_GPT_5_MINI = "gpt-5-mini",
|
||||
MODEL_GPT_5_NANO_2025_08_07 = "gpt-5-nano-2025-08-07",
|
||||
MODEL_DAVINCI_002 = "davinci-002",
|
||||
MODEL_BABBAGE_002 = "babbage-002",
|
||||
MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct",
|
||||
@ -31,8 +31,6 @@ export declare enum E_OPENAI_MODEL {
|
||||
MODEL_GPT_4O_MINI = "gpt-4o-mini",
|
||||
MODEL_GPT_4O_2024_08_06 = "gpt-4o-2024-08-06",
|
||||
MODEL_CHATGPT_4O_LATEST = "chatgpt-4o-latest",
|
||||
MODEL_O1_PREVIEW_2024_09_12 = "o1-preview-2024-09-12",
|
||||
MODEL_O1_PREVIEW = "o1-preview",
|
||||
MODEL_O1_MINI_2024_09_12 = "o1-mini-2024-09-12",
|
||||
MODEL_O1_MINI = "o1-mini",
|
||||
MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01 = "gpt-4o-realtime-preview-2024-10-01",
|
||||
@ -61,7 +59,9 @@ export declare enum E_OPENAI_MODEL {
|
||||
MODEL_O1_PRO_2025_03_19 = "o1-pro-2025-03-19",
|
||||
MODEL_O1_PRO = "o1-pro",
|
||||
MODEL_GPT_4O_MINI_TTS = "gpt-4o-mini-tts",
|
||||
MODEL_O3_2025_04_16 = "o3-2025-04-16",
|
||||
MODEL_O4_MINI_2025_04_16 = "o4-mini-2025-04-16",
|
||||
MODEL_O3 = "o3",
|
||||
MODEL_O4_MINI = "o4-mini",
|
||||
MODEL_GPT_4_1_2025_04_14 = "gpt-4.1-2025-04-14",
|
||||
MODEL_GPT_4_1 = "gpt-4.1",
|
||||
@ -70,6 +70,13 @@ export declare enum E_OPENAI_MODEL {
|
||||
MODEL_GPT_4_1_NANO_2025_04_14 = "gpt-4.1-nano-2025-04-14",
|
||||
MODEL_GPT_4_1_NANO = "gpt-4.1-nano",
|
||||
MODEL_GPT_IMAGE_1 = "gpt-image-1",
|
||||
MODEL_CODEX_MINI_LATEST = "codex-mini-latest",
|
||||
MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03 = "gpt-4o-realtime-preview-2025-06-03",
|
||||
MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03 = "gpt-4o-audio-preview-2025-06-03",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26 = "o4-mini-deep-research-2025-06-26",
|
||||
MODEL_GPT_5_CHAT_LATEST = "gpt-5-chat-latest",
|
||||
MODEL_GPT_5_2025_08_07 = "gpt-5-2025-08-07",
|
||||
MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k",
|
||||
MODEL_TTS_1 = "tts-1",
|
||||
MODEL_WHISPER_1 = "whisper-1",
|
||||
|
||||
@ -3,11 +3,11 @@ export var E_OPENAI_MODEL;
|
||||
E_OPENAI_MODEL["MODEL_GPT_4_0613"] = "gpt-4-0613";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4"] = "gpt-4";
|
||||
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO"] = "gpt-3.5-turbo";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26"] = "o4-mini-deep-research-2025-06-26";
|
||||
E_OPENAI_MODEL["MODEL_CODEX_MINI_LATEST"] = "codex-mini-latest";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03"] = "gpt-4o-realtime-preview-2025-06-03";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03"] = "gpt-4o-audio-preview-2025-06-03";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI_DEEP_RESEARCH"] = "o4-mini-deep-research";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_NANO"] = "gpt-5-nano";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5"] = "gpt-5";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_MINI_2025_08_07"] = "gpt-5-mini-2025-08-07";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_MINI"] = "gpt-5-mini";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_NANO_2025_08_07"] = "gpt-5-nano-2025-08-07";
|
||||
E_OPENAI_MODEL["MODEL_DAVINCI_002"] = "davinci-002";
|
||||
E_OPENAI_MODEL["MODEL_BABBAGE_002"] = "babbage-002";
|
||||
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_INSTRUCT"] = "gpt-3.5-turbo-instruct";
|
||||
@ -32,8 +32,6 @@ export var E_OPENAI_MODEL;
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_MINI"] = "gpt-4o-mini";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_2024_08_06"] = "gpt-4o-2024-08-06";
|
||||
E_OPENAI_MODEL["MODEL_CHATGPT_4O_LATEST"] = "chatgpt-4o-latest";
|
||||
E_OPENAI_MODEL["MODEL_O1_PREVIEW_2024_09_12"] = "o1-preview-2024-09-12";
|
||||
E_OPENAI_MODEL["MODEL_O1_PREVIEW"] = "o1-preview";
|
||||
E_OPENAI_MODEL["MODEL_O1_MINI_2024_09_12"] = "o1-mini-2024-09-12";
|
||||
E_OPENAI_MODEL["MODEL_O1_MINI"] = "o1-mini";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2024_10_01"] = "gpt-4o-realtime-preview-2024-10-01";
|
||||
@ -62,7 +60,9 @@ export var E_OPENAI_MODEL;
|
||||
E_OPENAI_MODEL["MODEL_O1_PRO_2025_03_19"] = "o1-pro-2025-03-19";
|
||||
E_OPENAI_MODEL["MODEL_O1_PRO"] = "o1-pro";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_MINI_TTS"] = "gpt-4o-mini-tts";
|
||||
E_OPENAI_MODEL["MODEL_O3_2025_04_16"] = "o3-2025-04-16";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI_2025_04_16"] = "o4-mini-2025-04-16";
|
||||
E_OPENAI_MODEL["MODEL_O3"] = "o3";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI"] = "o4-mini";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4_1_2025_04_14"] = "gpt-4.1-2025-04-14";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4_1"] = "gpt-4.1";
|
||||
@ -71,9 +71,16 @@ export var E_OPENAI_MODEL;
|
||||
E_OPENAI_MODEL["MODEL_GPT_4_1_NANO_2025_04_14"] = "gpt-4.1-nano-2025-04-14";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4_1_NANO"] = "gpt-4.1-nano";
|
||||
E_OPENAI_MODEL["MODEL_GPT_IMAGE_1"] = "gpt-image-1";
|
||||
E_OPENAI_MODEL["MODEL_CODEX_MINI_LATEST"] = "codex-mini-latest";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03"] = "gpt-4o-realtime-preview-2025-06-03";
|
||||
E_OPENAI_MODEL["MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03"] = "gpt-4o-audio-preview-2025-06-03";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI_DEEP_RESEARCH"] = "o4-mini-deep-research";
|
||||
E_OPENAI_MODEL["MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26"] = "o4-mini-deep-research-2025-06-26";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_CHAT_LATEST"] = "gpt-5-chat-latest";
|
||||
E_OPENAI_MODEL["MODEL_GPT_5_2025_08_07"] = "gpt-5-2025-08-07";
|
||||
E_OPENAI_MODEL["MODEL_GPT_3_5_TURBO_16K"] = "gpt-3.5-turbo-16k";
|
||||
E_OPENAI_MODEL["MODEL_TTS_1"] = "tts-1";
|
||||
E_OPENAI_MODEL["MODEL_WHISPER_1"] = "whisper-1";
|
||||
E_OPENAI_MODEL["MODEL_TEXT_EMBEDDING_ADA_002"] = "text-embedding-ada-002";
|
||||
})(E_OPENAI_MODEL || (E_OPENAI_MODEL = {}));
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQTRFWDtBQTVFRCxXQUFZLGNBQWM7SUFDeEIsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdURBQXFDLENBQUE7SUFDckMsNkZBQTJFLENBQUE7SUFDM0UsK0RBQTZDLENBQUE7SUFDN0MsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsdUVBQXFELENBQUE7SUFDckQsbURBQWlDLENBQUE7SUFDakMsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsbUZBQWlFLENBQUE7SUFDakUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsNkNBQTJCLENBQUE7SUFDM0IsaURBQStCLENBQUE7SUFDL0IsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsbUVBQWlELENBQUE7SUFDakQsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MsdUVBQXFELENBQUE7SUFDckQsaURBQStCLENBQUE7SUFDL0IsaUVBQStDLENBQUE7SUFDL0MsMkNBQXlCLENBQUE7SUFDekIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUscUVBQW1ELENBQUE7SUFDbkQsMkVBQXlELENBQUE7SUFDekQseUVBQXVELENBQUE7SUFDdkQsaUZBQStELENBQUE7SUFDL0QsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsMkdBQXlGLENBQUE7SUFDekYscUdBQW1GLENBQUE7SUFDbkYsdURBQXFDLENBQUE7SUFDckMsaUNBQWUsQ0FBQTtJQUNmLHFGQUFtRSxDQUFBO0lBQ25FLCtFQUE2RCxDQUFBO0lBQzdELDJDQUF5QixDQUFBO0lBQ3pCLGlFQUErQyxDQUFBO0lBQy9DLCtEQUE2QyxDQUFBO0lBQzdDLDZGQUEyRSxDQUFBO0lBQzNFLHVFQUFxRCxDQUFBO0lBQ3JELHVHQUFxRixDQUFBO0lBQ3JGLGlGQUErRCxDQUFBO0lBQy9ELCtEQUE2QyxDQUFBO0lBQzdDLHlFQUF1RCxDQUFBO0lBQ3ZELCtEQUE2QyxDQUFBO0lBQzdDLHlDQUF1QixDQUFBO0lBQ3ZCLDJEQUF5QyxDQUFBO0lBQ3pDLGlFQUErQyxDQUFBO0lBQy9DLDJDQUF5QixDQUFBO0lBQ3pCLGlFQUErQyxDQUFBO0lBQy9DLDJDQUF5QixDQUFBO0lBQ3pCLDJFQUF5RCxDQUFBO0lBQ3pELHFEQUFtQyxDQUFBO0lBQ25DLDJFQUF5RCxDQUFBO0lBQ3pELHFEQUFtQyxDQUFBO0lBQ25DLG1EQUFpQyxDQUFBO0lBQ2pDLCtEQUE2QyxDQUFBO0lBQzdDLHVDQUFxQixDQUFBO0lBQ3JCLCtDQUE2QixDQUFBO0lBQzdCLHlFQUF1RCxDQUFBO0FBQ3pELENBQUMsRUE1RVcsY0FBYyxLQUFkLGNBQWMsUUE0RXpCIn0=
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbmFpLW1vZGVscy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbmFpLW1vZGVscy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSxjQW1GWDtBQW5GRCxXQUFZLGNBQWM7SUFDeEIsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdURBQXFDLENBQUE7SUFDckMsaURBQStCLENBQUE7SUFDL0IsdUNBQXFCLENBQUE7SUFDckIsdUVBQXFELENBQUE7SUFDckQsaURBQStCLENBQUE7SUFDL0IsdUVBQXFELENBQUE7SUFDckQsbURBQWlDLENBQUE7SUFDakMsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQsbUZBQWlFLENBQUE7SUFDakUsNkNBQTJCLENBQUE7SUFDM0IsNkNBQTJCLENBQUE7SUFDM0IsaUVBQStDLENBQUE7SUFDL0MsaUVBQStDLENBQUE7SUFDL0MsNkNBQTJCLENBQUE7SUFDM0IsaURBQStCLENBQUE7SUFDL0IsdURBQXFDLENBQUE7SUFDckMseUVBQXVELENBQUE7SUFDdkQseUVBQXVELENBQUE7SUFDdkQsaUVBQStDLENBQUE7SUFDL0MsbUVBQWlELENBQUE7SUFDakQsaUVBQStDLENBQUE7SUFDL0MsbURBQWlDLENBQUE7SUFDakMseUVBQXVELENBQUE7SUFDdkQseUNBQXVCLENBQUE7SUFDdkIsK0RBQTZDLENBQUE7SUFDN0MseUVBQXVELENBQUE7SUFDdkQsbURBQWlDLENBQUE7SUFDakMsK0RBQTZDLENBQUE7SUFDN0MsK0RBQTZDLENBQUE7SUFDN0MsaUVBQStDLENBQUE7SUFDL0MsMkNBQXlCLENBQUE7SUFDekIsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUscUVBQW1ELENBQUE7SUFDbkQsMkVBQXlELENBQUE7SUFDekQseUVBQXVELENBQUE7SUFDdkQsaUZBQStELENBQUE7SUFDL0QsaUdBQStFLENBQUE7SUFDL0UsMkZBQXlFLENBQUE7SUFDekUsMkdBQXlGLENBQUE7SUFDekYscUdBQW1GLENBQUE7SUFDbkYsdURBQXFDLENBQUE7SUFDckMsaUNBQWUsQ0FBQTtJQUNmLHFGQUFtRSxDQUFBO0lBQ25FLCtFQUE2RCxDQUFBO0lBQzdELDJDQUF5QixDQUFBO0lBQ3pCLGlFQUErQyxDQUFBO0lBQy9DLCtEQUE2QyxDQUFBO0lBQzdDLDZGQUEyRSxDQUFBO0lBQzNFLHVFQUFxRCxDQUFBO0lBQ3JELHVHQUFxRixDQUFBO0lBQ3JGLGlGQUErRCxDQUFBO0lBQy9ELCtEQUE2QyxDQUFBO0lBQzdDLHlFQUF1RCxDQUFBO0lBQ3ZELCtEQUE2QyxDQUFBO0lBQzdDLHlDQUF1QixDQUFBO0lBQ3ZCLDJEQUF5QyxDQUFBO0lBQ3pDLHVEQUFxQyxDQUFBO0lBQ3JDLGlFQUErQyxDQUFBO0lBQy9DLGlDQUFlLENBQUE7SUFDZiwyQ0FBeUIsQ0FBQTtJQUN6QixpRUFBK0MsQ0FBQTtJQUMvQywyQ0FBeUIsQ0FBQTtJQUN6QiwyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQywyRUFBeUQsQ0FBQTtJQUN6RCxxREFBbUMsQ0FBQTtJQUNuQyxtREFBaUMsQ0FBQTtJQUNqQywrREFBNkMsQ0FBQTtJQUM3QyxpR0FBK0UsQ0FBQTtJQUMvRSwyRkFBeUUsQ0FBQTtJQUN6RSx1RUFBcUQsQ0FBQTtJQUNyRCw2RkFBMkUsQ0FBQTtJQUMzRSwrREFBNkMsQ0FBQTtJQUM3Qyw2REFBMkMsQ0FBQTtJQUMzQywrREFBNkMsQ0FBQTtJQUM3Qyx1Q0FBcUIsQ0FBQTtJQUNyQiwrQ0FBNkIsQ0FBQTtJQUM3Qix5RUFBdUQsQ0FBQTtBQUN6RCxDQUFDLEVBbkZXLGNBQWMsS0FBZCxjQUFjLFFBbUZ6QiJ9
|
||||
@ -1,6 +1,6 @@
|
||||
export declare enum E_OPENROUTER_MODEL_FREE {
|
||||
MODEL_FREE_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free",
|
||||
MODEL_FREE_QWEN_QWEN3_235B_A22B_2507_FREE = "qwen/qwen3-235b-a22b-2507:free",
|
||||
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
|
||||
MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
|
||||
MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free",
|
||||
MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
|
||||
MODEL_FREE_GOOGLE_GEMMA_3N_E2B_IT_FREE = "google/gemma-3n-e2b-it:free",
|
||||
@ -21,7 +21,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
|
||||
MODEL_FREE_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE = "tngtech/deepseek-r1t-chimera:free",
|
||||
MODEL_FREE_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free",
|
||||
MODEL_FREE_THUDM_GLM_Z1_32B_FREE = "thudm/glm-z1-32b:free",
|
||||
MODEL_FREE_THUDM_GLM_4_32B_FREE = "thudm/glm-4-32b:free",
|
||||
MODEL_FREE_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE = "shisa-ai/shisa-v2-llama3.3-70b:free",
|
||||
MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE = "arliai/qwq-32b-arliai-rpr-v1:free",
|
||||
MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE = "agentica-org/deepcoder-14b-preview:free",
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
export var E_OPENROUTER_MODEL_FREE;
|
||||
(function (E_OPENROUTER_MODEL_FREE) {
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_CODER_FREE"] = "qwen/qwen3-coder:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN3_235B_A22B_2507_FREE"] = "qwen/qwen3-235b-a22b-2507:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_20B_FREE"] = "openai/gpt-oss-20b:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE"] = "z-ai/glm-4.5-air:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE"] = "moonshotai/kimi-k2:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE"] = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3N_E2B_IT_FREE"] = "google/gemma-3n-e2b-it:free";
|
||||
@ -22,7 +22,6 @@ export var E_OPENROUTER_MODEL_FREE;
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE"] = "tngtech/deepseek-r1t-chimera:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MICROSOFT_MAI_DS_R1_FREE"] = "microsoft/mai-ds-r1:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_THUDM_GLM_Z1_32B_FREE"] = "thudm/glm-z1-32b:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_THUDM_GLM_4_32B_FREE"] = "thudm/glm-4-32b:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_SHISA_AI_SHISA_V2_LLAMA3_3_70B_FREE"] = "shisa-ai/shisa-v2-llama3.3-70b:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE"] = "arliai/qwq-32b-arliai-rpr-v1:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE"] = "agentica-org/deepcoder-14b-preview:free";
|
||||
@ -57,4 +56,4 @@ export var E_OPENROUTER_MODEL_FREE;
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free";
|
||||
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free";
|
||||
})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {}));
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkF5RFg7QUF6REQsV0FBWSx1QkFBdUI7SUFDakMscUZBQTBELENBQUE7SUFDMUQsdUdBQTRFLENBQUE7SUFDNUUseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsdUZBQTRELENBQUE7SUFDNUQsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUscUZBQTBELENBQUE7SUFDMUQsbUZBQXdELENBQUE7SUFDeEQsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYsaUdBQXNFLENBQUE7SUFDdEUscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYscUlBQTBHLENBQUE7SUFDMUcseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQXpEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBeURsQyJ9
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkF3RFg7QUF4REQsV0FBWSx1QkFBdUI7SUFDakMseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsdUZBQTRELENBQUE7SUFDNUQsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUscUZBQTBELENBQUE7SUFDMUQsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYsaUdBQXNFLENBQUE7SUFDdEUscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYscUlBQTBHLENBQUE7SUFDMUcseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQXhEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBd0RsQyJ9
|
||||
@ -1,11 +1,22 @@
|
||||
export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_OPENAI_GPT_5_CHAT = "openai/gpt-5-chat",
|
||||
MODEL_OPENAI_GPT_5 = "openai/gpt-5",
|
||||
MODEL_OPENAI_GPT_5_MINI = "openai/gpt-5-mini",
|
||||
MODEL_OPENAI_GPT_5_NANO = "openai/gpt-5-nano",
|
||||
MODEL_OPENAI_GPT_OSS_120B = "openai/gpt-oss-120b",
|
||||
MODEL_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
|
||||
MODEL_OPENAI_GPT_OSS_20B = "openai/gpt-oss-20b",
|
||||
MODEL_ANTHROPIC_CLAUDE_OPUS_4_1 = "anthropic/claude-opus-4.1",
|
||||
MODEL_MISTRALAI_CODESTRAL_2508 = "mistralai/codestral-2508",
|
||||
MODEL_QWEN_QWEN3_30B_A3B_INSTRUCT_2507 = "qwen/qwen3-30b-a3b-instruct-2507",
|
||||
MODEL_Z_AI_GLM_4_5 = "z-ai/glm-4.5",
|
||||
MODEL_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
|
||||
MODEL_Z_AI_GLM_4_5_AIR = "z-ai/glm-4.5-air",
|
||||
MODEL_QWEN_QWEN3_235B_A22B_THINKING_2507 = "qwen/qwen3-235b-a22b-thinking-2507",
|
||||
MODEL_Z_AI_GLM_4_32B = "z-ai/glm-4-32b",
|
||||
MODEL_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free",
|
||||
MODEL_QWEN_QWEN3_CODER = "qwen/qwen3-coder",
|
||||
MODEL_BYTEDANCE_UI_TARS_1_5_7B = "bytedance/ui-tars-1.5-7b",
|
||||
MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE = "google/gemini-2.5-flash-lite",
|
||||
MODEL_QWEN_QWEN3_235B_A22B_2507_FREE = "qwen/qwen3-235b-a22b-2507:free",
|
||||
MODEL_QWEN_QWEN3_235B_A22B_2507 = "qwen/qwen3-235b-a22b-2507",
|
||||
MODEL_SWITCHPOINT_ROUTER = "switchpoint/router",
|
||||
MODEL_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free",
|
||||
@ -19,13 +30,11 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_TENCENT_HUNYUAN_A13B_INSTRUCT_FREE = "tencent/hunyuan-a13b-instruct:free",
|
||||
MODEL_TENCENT_HUNYUAN_A13B_INSTRUCT = "tencent/hunyuan-a13b-instruct",
|
||||
MODEL_TNGTECH_DEEPSEEK_R1T2_CHIMERA_FREE = "tngtech/deepseek-r1t2-chimera:free",
|
||||
MODEL_TNGTECH_DEEPSEEK_R1T2_CHIMERA = "tngtech/deepseek-r1t2-chimera",
|
||||
MODEL_MORPH_MORPH_V3_LARGE = "morph/morph-v3-large",
|
||||
MODEL_MORPH_MORPH_V3_FAST = "morph/morph-v3-fast",
|
||||
MODEL_BAIDU_ERNIE_4_5_300B_A47B = "baidu/ernie-4.5-300b-a47b",
|
||||
MODEL_THEDRUMMER_ANUBIS_70B_V1_1 = "thedrummer/anubis-70b-v1.1",
|
||||
MODEL_INCEPTION_MERCURY = "inception/mercury",
|
||||
MODEL_MORPH_MORPH_V2 = "morph/morph-v2",
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_2_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.2-24b-instruct:free",
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_2_24B_INSTRUCT = "mistralai/mistral-small-3.2-24b-instruct",
|
||||
MODEL_MINIMAX_MINIMAX_M1 = "minimax/minimax-m1",
|
||||
@ -46,7 +55,6 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_FREE = "deepseek/deepseek-r1-0528:free",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_0528 = "deepseek/deepseek-r1-0528",
|
||||
MODEL_SARVAMAI_SARVAM_M_FREE = "sarvamai/sarvam-m:free",
|
||||
MODEL_SARVAMAI_SARVAM_M = "sarvamai/sarvam-m",
|
||||
MODEL_THEDRUMMER_VALKYRIE_49B_V1 = "thedrummer/valkyrie-49b-v1",
|
||||
MODEL_ANTHROPIC_CLAUDE_OPUS_4 = "anthropic/claude-opus-4",
|
||||
MODEL_ANTHROPIC_CLAUDE_SONNET_4 = "anthropic/claude-sonnet-4",
|
||||
@ -78,11 +86,10 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_QWEN_QWEN3_235B_A22B_FREE = "qwen/qwen3-235b-a22b:free",
|
||||
MODEL_QWEN_QWEN3_235B_A22B = "qwen/qwen3-235b-a22b",
|
||||
MODEL_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE = "tngtech/deepseek-r1t-chimera:free",
|
||||
MODEL_TNGTECH_DEEPSEEK_R1T_CHIMERA = "tngtech/deepseek-r1t-chimera",
|
||||
MODEL_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free",
|
||||
MODEL_MICROSOFT_MAI_DS_R1 = "microsoft/mai-ds-r1",
|
||||
MODEL_THUDM_GLM_Z1_32B_FREE = "thudm/glm-z1-32b:free",
|
||||
MODEL_THUDM_GLM_Z1_32B = "thudm/glm-z1-32b",
|
||||
MODEL_THUDM_GLM_4_32B_FREE = "thudm/glm-4-32b:free",
|
||||
MODEL_THUDM_GLM_4_32B = "thudm/glm-4-32b",
|
||||
MODEL_OPENAI_O4_MINI_HIGH = "openai/o4-mini-high",
|
||||
MODEL_OPENAI_O3 = "openai/o3",
|
||||
@ -120,15 +127,12 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct",
|
||||
MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it",
|
||||
MODEL_AI21_JAMBA_1_6_LARGE = "ai21/jamba-1.6-large",
|
||||
MODEL_AI21_JAMBA_1_6_MINI = "ai21/jamba-1.6-mini",
|
||||
MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_3_12B_IT = "google/gemma-3-12b-it",
|
||||
MODEL_COHERE_COMMAND_A = "cohere/command-a",
|
||||
MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW = "openai/gpt-4o-mini-search-preview",
|
||||
MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview",
|
||||
MODEL_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free",
|
||||
MODEL_REKAAI_REKA_FLASH_3 = "rekaai/reka-flash-3",
|
||||
MODEL_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it",
|
||||
MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1",
|
||||
@ -149,6 +153,7 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B = "cognitivecomputations/dolphin3.0-r1-mistral-24b",
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free",
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B = "cognitivecomputations/dolphin3.0-mistral-24b",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_3_8B = "meta-llama/llama-guard-3-8b",
|
||||
MODEL_OPENAI_O3_MINI_HIGH = "openai/o3-mini-high",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_8B = "deepseek/deepseek-r1-distill-llama-8b",
|
||||
@ -194,7 +199,6 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1",
|
||||
MODEL_AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1",
|
||||
MODEL_QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview",
|
||||
MODEL_EVA_UNIT_01_EVA_QWEN_2_5_72B = "eva-unit-01/eva-qwen-2.5-72b",
|
||||
MODEL_OPENAI_GPT_4O_2024_11_20 = "openai/gpt-4o-2024-11-20",
|
||||
MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411",
|
||||
MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407",
|
||||
@ -205,10 +209,9 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct",
|
||||
MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b",
|
||||
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
|
||||
@ -216,30 +219,28 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
|
||||
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
|
||||
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
|
||||
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
|
||||
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
|
||||
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
|
||||
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
|
||||
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
|
||||
MODEL_LIQUID_LFM_40B = "liquid/lfm-40b",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
|
||||
MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_OPENAI_O1_PREVIEW = "openai/o1-preview",
|
||||
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
|
||||
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
|
||||
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
|
||||
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
|
||||
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
|
||||
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
|
||||
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b",
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b",
|
||||
@ -247,11 +248,10 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
|
||||
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
|
||||
MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
|
||||
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
|
||||
@ -259,25 +259,23 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it",
|
||||
MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it",
|
||||
MODEL_01_AI_YI_LARGE = "01-ai/yi-large",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620_BETA = "anthropic/claude-3.5-sonnet-20240620:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620",
|
||||
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
|
||||
MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
|
||||
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
|
||||
MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
|
||||
MODEL_OPENAI_GPT_4O = "openai/gpt-4o",
|
||||
MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended",
|
||||
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
|
||||
MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
|
||||
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
|
||||
@ -287,24 +285,22 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
|
||||
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
|
||||
MODEL_COHERE_COMMAND = "cohere/command",
|
||||
MODEL_COHERE_COMMAND_R = "cohere/command-r",
|
||||
MODEL_COHERE_COMMAND = "cohere/command",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
|
||||
MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
|
||||
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
|
||||
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
|
||||
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
|
||||
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
|
||||
MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
|
||||
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2",
|
||||
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
|
||||
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
|
||||
MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b",
|
||||
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
|
||||
MODEL_OPENROUTER_AUTO = "openrouter/auto",
|
||||
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
|
||||
@ -315,7 +311,7 @@ export declare enum E_OPENROUTER_MODEL {
|
||||
MODEL_MANCER_WEAVER = "mancer/weaver",
|
||||
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
|
||||
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
|
||||
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
|
||||
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
|
||||
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
|
||||
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
packages/kbot/dist-in/utils/merge.d.ts (vendored, 2 changed lines)
@ -25,4 +25,4 @@ export declare function merge(existingContent: string, newContent: string): stri
 * @param logger - Optional logger instance conforming to SimpleLogger.
 * @returns The final content written to the file or the original content if read fails.
 */
export declare function writeOrAppend(dstPath: string, content: string, appendMode?: 'concat' | 'merge'): string;
export declare function writeOrAppend(dstPath: string, content: string, appendMode?: 'concat' | 'merge' | 'replace'): string;
@ -35,6 +35,10 @@ export function merge(existingContent, newContent) {
 * @returns The final content written to the file or the original content if read fails.
 */
export function writeOrAppend(dstPath, content, appendMode) {
    if (appendMode === 'replace') {
        write(dstPath, content);
        return content;
    }
    let finalContent = content;
    if (exists(dstPath) && appendMode) {
        const existingContentBuffer = read(dstPath); // Read returns Buffer | undefined
@ -44,11 +48,11 @@ export function writeOrAppend(dstPath, content, appendMode) {
                finalContent = concat(existingContent, content);
            }
            else if (appendMode === 'merge') {
                finalContent = merge(existingContent, content); // Using placeholder merge for now
                finalContent = merge(existingContent, content);
            }
        }
    }
    write(dstPath, finalContent);
    return finalContent;
}
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWVyZ2UuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvdXRpbHMvbWVyZ2UudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQ7Ozs7OztHQU1HO0FBQ0gsTUFBTSxVQUFVLE1BQU0sQ0FBQyxlQUF1QixFQUFFLFVBQWtCO0lBQ2hFLE9BQU8sZUFBZSxHQUFHLE1BQU0sR0FBRyxVQUFVLEdBQUcsSUFBSSxDQUFBO0FBQ3JELENBQUM7QUFFRDs7Ozs7Ozs7R0FRRztBQUNILE1BQU0sVUFBVSxLQUFLLENBQUMsZUFBdUIsRUFBRSxVQUFrQjtJQUMvRCxvQ0FBb0M7SUFDcEMsaUZBQWlGO0lBQ2pGLE9BQU8sZUFBZSxHQUFHLElBQUksR0FBRyxVQUFVLENBQUM7QUFDN0MsQ0FBQztBQUVEOzs7Ozs7OztHQVFHO0FBQ0gsTUFBTSxVQUFVLGFBQWEsQ0FDM0IsT0FBZSxFQUNmLE9BQWUsRUFDZixVQUErQjtJQUUvQixJQUFJLFlBQVksR0FBRyxPQUFPLENBQUM7SUFDM0IsSUFBSSxNQUFNLENBQUMsT0FBTyxDQUFDLElBQUksVUFBVSxFQUFFLENBQUM7UUFDbEMsTUFBTSxxQkFBcUIsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxrQ0FBa0M7UUFDL0UsSUFBSSxxQkFBcUIsRUFBRSxDQUFDLENBQUMsK0JBQStCO1lBQzFELE1BQU0sZUFBZSxHQUFHLHFCQUFxQixDQUFDLFFBQVEsRUFBRSxDQUFDO1lBQ3pELElBQUksVUFBVSxLQUFLLFFBQVEsRUFBRSxDQUFDO2dCQUM1QixZQUFZLEdBQUcsTUFBTSxDQUFDLGVBQWUsRUFBRSxPQUFPLENBQUMsQ0FBQztZQUNsRCxDQUFDO2lCQUFNLElBQUksVUFBVSxLQUFLLE9BQU8sRUFBRSxDQUFDO2dCQUNsQyxZQUFZLEdBQUcsS0FBSyxDQUFDLGVBQWUsRUFBRSxPQUFPLENBQUMsQ0FBQyxDQUFDLDBDQUEwQztZQUM1RixDQUFDO1FBQ0gsQ0FBQztJQUNILENBQUM7SUFDRCxLQUFLLENBQUMsT0FBTyxFQUFFLFlBQVksQ0FBQyxDQUFBO0lBQzVCLE9BQU8sWUFBWSxDQUFBO0FBQ3JCLENBQUMifQ==
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWVyZ2UuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvdXRpbHMvbWVyZ2UudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsSUFBSSxJQUFJLElBQUksRUFBRSxNQUFNLG1CQUFtQixDQUFBO0FBQ2hELE9BQU8sRUFBRSxJQUFJLElBQUksS0FBSyxFQUFFLE1BQU0sb0JBQW9CLENBQUE7QUFFbEQ7Ozs7OztHQU1HO0FBQ0gsTUFBTSxVQUFVLE1BQU0sQ0FBQyxlQUF1QixFQUFFLFVBQWtCO0lBQ2hFLE9BQU8sZUFBZSxHQUFHLE1BQU0sR0FBRyxVQUFVLEdBQUcsSUFBSSxDQUFBO0FBQ3JELENBQUM7QUFFRDs7Ozs7Ozs7R0FRRztBQUNILE1BQU0sVUFBVSxLQUFLLENBQUMsZUFBdUIsRUFBRSxVQUFrQjtJQUMvRCxvQ0FBb0M7SUFDcEMsaUZBQWlGO0lBQ2pGLE9BQU8sZUFBZSxHQUFHLElBQUksR0FBRyxVQUFVLENBQUM7QUFDN0MsQ0FBQztBQUVEOzs7Ozs7OztHQVFHO0FBQ0gsTUFBTSxVQUFVLGFBQWEsQ0FDM0IsT0FBZSxFQUNmLE9BQWUsRUFDZixVQUEyQztJQUUzQyxJQUFJLFVBQVUsS0FBSyxTQUFTLEVBQUUsQ0FBQztRQUM3QixLQUFLLENBQUMsT0FBTyxFQUFFLE9BQU8sQ0FBQyxDQUFDO1FBQ3hCLE9BQU8sT0FBTyxDQUFDO0lBQ2pCLENBQUM7SUFDRCxJQUFJLFlBQVksR0FBRyxPQUFPLENBQUM7SUFDM0IsSUFBSSxNQUFNLENBQUMsT0FBTyxDQUFDLElBQUksVUFBVSxFQUFFLENBQUM7UUFDbEMsTUFBTSxxQkFBcUIsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxrQ0FBa0M7UUFDL0UsSUFBSSxxQkFBcUIsRUFBRSxDQUFDLENBQUMsK0JBQStCO1lBQzFELE1BQU0sZUFBZSxHQUFHLHFCQUFxQixDQUFDLFFBQVEsRUFBRSxDQUFDO1lBQ3pELElBQUksVUFBVSxLQUFLLFFBQVEsRUFBRSxDQUFDO2dCQUM1QixZQUFZLEdBQUcsTUFBTSxDQUFDLGVBQWUsRUFBRSxPQUFPLENBQUMsQ0FBQztZQUNsRCxDQUFDO2lCQUFNLElBQUksVUFBVSxLQUFLLE9BQU8sRUFBRSxDQUFDO2dCQUNsQyxZQUFZLEdBQUcsS0FBSyxDQUFDLGVBQWUsRUFBRSxPQUFPLENBQUMsQ0FBQztZQUNqRCxDQUFDO1FBQ0gsQ0FBQztJQUNILENBQUM7SUFFRCxLQUFLLENBQUMsT0FBTyxFQUFFLFlBQVksQ0FBQyxDQUFDO0lBQzdCLE9BQU8sWUFBWSxDQUFDO0FBQ3RCLENBQUMifQ==
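A short sketch of how the new 'replace' append mode behaves relative to 'concat' and 'merge' (the file names and import path are illustrative assumptions):

    // Sketch only: writeOrAppend with the new 'replace' mode overwrites the destination
    // instead of concatenating or merging with existing content.
    import { writeOrAppend } from './utils/merge.js';

    writeOrAppend('out.md', 'fresh content', 'replace'); // overwrites out.md
    writeOrAppend('out.md', 'more content', 'concat');   // appends to the existing file
    writeOrAppend('out.md', 'more content', 'merge');    // merges with the existing file (placeholder merge per the code comments)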
packages/kbot/dist-in/zod_schema.d.ts (vendored, 2 changed lines)
@ -13,7 +13,7 @@ export declare const E_Mode: {
    readonly CUSTOM: "custom";
};
export declare const EType: z.ZodEnum<["completion", "tools", "assistant", "responses", "custom"]>;
export declare const E_AppendMode: z.ZodEnum<["concat", "merge"]>;
export declare const E_AppendMode: z.ZodEnum<["concat", "merge", "replace"]>;
export type E_AppendModeType = z.infer<typeof E_AppendMode>;
export declare const E_WrapMode: z.ZodEnum<["meta", "none"]>;
export type E_WrapModeType = z.infer<typeof E_WrapMode>;
File diff suppressed because one or more lines are too long
packages/kbot/dist-in/zod_types.d.ts (vendored, 56 changed lines)
@ -8,7 +8,7 @@ export interface IKBotOptions {
    /** Optional destination path for the result, will substitute ${MODEL_NAME} and ${ROUTER} in the path. Optional, used for "completion" mode */
    dst?: string | undefined;
    /** How to handle output if --dst file already exists: "concat" (append) or "merge" (try to merge structures if possible, otherwise append). Only used if --dst is specified. */
    append?: ("concat" | "merge") | undefined;
    append?: ("concat" | "merge" | "replace") | undefined;
    /** Specify how to wrap the output, "meta (file name, absolute path, cwd)" or "none". */
    wrap?: "meta" | "none";
    /** Iterate over items, supported: GLOB | Path to JSON File | array of strings (comma separated). To test different models, use --each="gpt-3.5-turbo,gpt-4o", the actual string will exposed as variable `ITEM`, eg: --dst="${ITEM}-output.md" */
@ -31,8 +31,6 @@ export interface IKBotOptions {

 OpenRouter models:

01-ai/yi-large | paid
|
||||
aetherwiing/mn-starcannon-12b | paid
|
||||
agentica-org/deepcoder-14b-preview | paid
|
||||
agentica-org/deepcoder-14b-preview:free | free
|
||||
ai21/jamba-1.6-large | paid
|
||||
@ -48,11 +46,8 @@ export interface IKBotOptions {
|
||||
anthropic/claude-3-haiku:beta | paid
|
||||
anthropic/claude-3-opus | paid
|
||||
anthropic/claude-3-opus:beta | paid
|
||||
anthropic/claude-3-sonnet | paid
|
||||
anthropic/claude-3-sonnet:beta | paid
|
||||
anthropic/claude-3.5-haiku | paid
|
||||
anthropic/claude-3.5-haiku-20241022 | paid
|
||||
anthropic/claude-3.5-haiku-20241022:beta | paid
|
||||
anthropic/claude-3.5-haiku:beta | paid
|
||||
anthropic/claude-3.5-sonnet | paid
|
||||
anthropic/claude-3.5-sonnet-20240620 | paid
|
||||
@ -62,23 +57,17 @@ export interface IKBotOptions {
|
||||
anthropic/claude-3.7-sonnet:beta | paid
|
||||
anthropic/claude-3.7-sonnet:thinking | paid
|
||||
anthropic/claude-opus-4 | paid
|
||||
anthropic/claude-opus-4.1 | paid
|
||||
anthropic/claude-sonnet-4 | paid
|
||||
anthropic/claude-2 | paid
|
||||
anthropic/claude-2:beta | paid
|
||||
anthropic/claude-2.0 | paid
|
||||
anthropic/claude-2.0:beta | paid
|
||||
anthropic/claude-2.1 | paid
|
||||
anthropic/claude-2.1:beta | paid
|
||||
arcee-ai/arcee-blitz | paid
|
||||
arcee-ai/caller-large | paid
|
||||
arcee-ai/coder-large | paid
|
||||
arcee-ai/maestro-reasoning | paid
|
||||
arcee-ai/spotlight | paid
|
||||
arcee-ai/virtuoso-large | paid
|
||||
arcee-ai/virtuoso-medium-v2 | paid
|
||||
arliai/qwq-32b-arliai-rpr-v1 | paid
|
||||
arliai/qwq-32b-arliai-rpr-v1:free | free
|
||||
openrouter/auto | paid
|
||||
baidu/ernie-4.5-300b-a47b | paid
|
||||
bytedance/ui-tars-1.5-7b | paid
|
||||
cohere/command | paid
|
||||
cohere/command-a | paid
|
||||
cohere/command-r | paid
|
||||
@ -92,10 +81,9 @@ export interface IKBotOptions {
|
||||
deepseek/deepseek-r1-0528-qwen3-8b | paid
|
||||
deepseek/deepseek-r1-0528-qwen3-8b:free | free
|
||||
deepseek/deepseek-chat | paid
|
||||
deepseek/deepseek-chat:free | free
|
||||
deepseek/deepseek-chat-v3-0324 | paid
|
||||
deepseek/deepseek-chat-v3-0324:free | free
|
||||
deepseek/deepseek-v3-base:free | free
|
||||
deepseek/deepseek-v3-base | paid
|
||||
deepseek/deepseek-r1 | paid
|
||||
deepseek/deepseek-r1:free | free
|
||||
deepseek/deepseek-r1-0528 | paid
|
||||
@ -109,12 +97,11 @@ export interface IKBotOptions {
|
||||
deepseek/deepseek-r1-distill-qwen-32b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-7b | paid
|
||||
cognitivecomputations/dolphin-mixtral-8x22b | paid
|
||||
cognitivecomputations/dolphin3.0-mistral-24b | paid
|
||||
cognitivecomputations/dolphin3.0-mistral-24b:free | free
|
||||
cognitivecomputations/dolphin3.0-r1-mistral-24b | paid
|
||||
cognitivecomputations/dolphin3.0-r1-mistral-24b:free | free
|
||||
eleutherai/llemma_7b | paid
|
||||
eva-unit-01/eva-llama-3.33-70b | paid
|
||||
eva-unit-01/eva-qwen-2.5-72b | paid
|
||||
sao10k/fimbulvetr-11b-v2 | paid
|
||||
alpindale/goliath-120b | paid
|
||||
google/gemini-flash-1.5 | paid
|
||||
@ -124,6 +111,7 @@ export interface IKBotOptions {
|
||||
google/gemini-2.0-flash-exp:free | free
|
||||
google/gemini-2.0-flash-lite-001 | paid
|
||||
google/gemini-2.5-flash | paid
|
||||
google/gemini-2.5-flash-lite | paid
|
||||
google/gemini-2.5-flash-lite-preview-06-17 | paid
|
||||
google/gemini-2.5-pro | paid
|
||||
google/gemini-2.5-pro-exp-03-25 | paid
|
||||
@ -141,6 +129,7 @@ export interface IKBotOptions {
|
||||
google/gemma-3n-e2b-it:free | free
|
||||
google/gemma-3n-e4b-it | paid
|
||||
google/gemma-3n-e4b-it:free | free
|
||||
openrouter/horizon-beta | paid
|
||||
inception/mercury | paid
|
||||
inception/mercury-coder | paid
|
||||
infermatic/mn-inferor-12b | paid
|
||||
@ -151,7 +140,6 @@ export interface IKBotOptions {
|
||||
liquid/lfm-40b | paid
|
||||
liquid/lfm-7b | paid
|
||||
meta-llama/llama-guard-3-8b | paid
|
||||
alpindale/magnum-72b | paid
|
||||
anthracite-org/magnum-v2-72b | paid
|
||||
anthracite-org/magnum-v4-72b | paid
|
||||
mancer/weaver | paid
|
||||
@ -174,6 +162,7 @@ export interface IKBotOptions {
|
||||
meta-llama/llama-4-scout | paid
|
||||
meta-llama/llama-guard-4-12b | paid
|
||||
meta-llama/llama-guard-2-8b | paid
|
||||
microsoft/mai-ds-r1 | paid
|
||||
microsoft/mai-ds-r1:free | free
|
||||
microsoft/phi-4 | paid
|
||||
microsoft/phi-4-multimodal-instruct | paid
|
||||
@ -187,10 +176,10 @@ export interface IKBotOptions {
|
||||
mistralai/mistral-large | paid
|
||||
mistralai/mistral-large-2407 | paid
|
||||
mistralai/mistral-large-2411 | paid
|
||||
nothingiisreal/mn-celeste-12b | paid
|
||||
mistralai/mistral-small | paid
|
||||
mistralai/mistral-tiny | paid
|
||||
mistralai/codestral-2501 | paid
|
||||
mistralai/codestral-2508 | paid
|
||||
mistralai/devstral-medium | paid
|
||||
mistralai/devstral-small | paid
|
||||
mistralai/devstral-small-2505 | paid
|
||||
@ -219,10 +208,10 @@ export interface IKBotOptions {
|
||||
mistralai/pixtral-12b | paid
|
||||
mistralai/pixtral-large-2411 | paid
|
||||
mistralai/mistral-saba | paid
|
||||
moonshotai/kimi-vl-a3b-thinking | paid
|
||||
moonshotai/kimi-vl-a3b-thinking:free | free
|
||||
moonshotai/kimi-k2 | paid
|
||||
moonshotai/kimi-k2:free | free
|
||||
morph/morph-v2 | paid
|
||||
morph/morph-v3-fast | paid
|
||||
morph/morph-v3-large | paid
|
||||
gryphe/mythomax-l2-13b | paid
|
||||
@ -230,6 +219,7 @@ export interface IKBotOptions {
|
||||
neversleep/llama-3.1-lumimaid-8b | paid
|
||||
neversleep/noromaid-20b | paid
|
||||
nousresearch/deephermes-3-llama-3-8b-preview:free | free
|
||||
nousresearch/deephermes-3-mistral-24b-preview | paid
|
||||
nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid
|
||||
nousresearch/hermes-3-llama-3.1-405b | paid
|
||||
nousresearch/hermes-3-llama-3.1-70b | paid
|
||||
@ -240,6 +230,8 @@ export interface IKBotOptions {
|
||||
nvidia/llama-3.3-nemotron-super-49b-v1 | paid
|
||||
openai/chatgpt-4o-latest | paid
|
||||
openai/codex-mini | paid
|
||||
openai/gpt-oss-120b | paid
|
||||
openai/gpt-oss-20b | paid
|
||||
openai/gpt-3.5-turbo | paid
|
||||
openai/gpt-3.5-turbo-0613 | paid
|
||||
openai/gpt-3.5-turbo-16k | paid
|
||||
@ -264,8 +256,6 @@ export interface IKBotOptions {
|
||||
openai/o1 | paid
|
||||
openai/o1-mini | paid
|
||||
openai/o1-mini-2024-09-12 | paid
|
||||
openai/o1-preview | paid
|
||||
openai/o1-preview-2024-09-12 | paid
|
||||
openai/o1-pro | paid
|
||||
openai/o3 | paid
|
||||
openai/o3-mini | paid
|
||||
@ -281,6 +271,7 @@ export interface IKBotOptions {
|
||||
perplexity/sonar-reasoning | paid
|
||||
perplexity/sonar-reasoning-pro | paid
|
||||
pygmalionai/mythalion-13b | paid
|
||||
featherless/qwerky-72b:free | free
|
||||
qwen/qwen-2-72b-instruct | paid
|
||||
qwen/qwen-vl-max | paid
|
||||
qwen/qwen-vl-plus | paid
|
||||
@ -296,13 +287,16 @@ export interface IKBotOptions {
|
||||
qwen/qwen3-14b:free | free
|
||||
qwen/qwen3-235b-a22b | paid
|
||||
qwen/qwen3-235b-a22b:free | free
|
||||
qwen/qwen3-235b-a22b-2507 | paid
|
||||
qwen/qwen3-235b-a22b-thinking-2507 | paid
|
||||
qwen/qwen3-30b-a3b | paid
|
||||
qwen/qwen3-30b-a3b:free | free
|
||||
qwen/qwen3-30b-a3b-instruct-2507 | paid
|
||||
qwen/qwen3-32b | paid
|
||||
qwen/qwen3-32b:free | free
|
||||
qwen/qwen3-4b:free | free
|
||||
qwen/qwen3-8b | paid
|
||||
qwen/qwen3-8b:free | free
|
||||
qwen/qwen3-coder | paid
|
||||
qwen/qwq-32b | paid
|
||||
qwen/qwq-32b:free | free
|
||||
qwen/qwq-32b-preview | paid
|
||||
@ -311,16 +305,14 @@ export interface IKBotOptions {
|
||||
qwen/qwen-2.5-7b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct:free | free
|
||||
featherless/qwerky-72b:free | free
|
||||
rekaai/reka-flash-3 | paid
|
||||
rekaai/reka-flash-3:free | free
|
||||
undi95/remm-slerp-l2-13b | paid
|
||||
sao10k/l3-lunaris-8b | paid
|
||||
sao10k/l3-euryale-70b | paid
|
||||
sao10k/l3.1-euryale-70b | paid
|
||||
sao10k/l3.3-euryale-70b | paid
|
||||
sarvamai/sarvam-m | paid
|
||||
sarvamai/sarvam-m:free | free
|
||||
shisa-ai/shisa-v2-llama3.3-70b | paid
|
||||
shisa-ai/shisa-v2-llama3.3-70b:free | free
|
||||
raifle/sorcererlm-8x22b | paid
|
||||
switchpoint/router | paid
|
||||
@ -333,9 +325,9 @@ export interface IKBotOptions {
|
||||
thedrummer/unslopnemo-12b | paid
|
||||
thedrummer/valkyrie-49b-v1 | paid
|
||||
thudm/glm-4-32b | paid
|
||||
thudm/glm-4-32b:free | free
|
||||
thudm/glm-4.1v-9b-thinking | paid
|
||||
thudm/glm-z1-32b:free | free
|
||||
tngtech/deepseek-r1t-chimera | paid
|
||||
tngtech/deepseek-r1t-chimera:free | free
|
||||
tngtech/deepseek-r1t2-chimera:free | free
|
||||
undi95/toppy-m-7b | paid
|
||||
@ -350,6 +342,10 @@ export interface IKBotOptions {
|
||||
x-ai/grok-3-mini-beta | paid
|
||||
x-ai/grok-4 | paid
|
||||
x-ai/grok-vision-beta | paid
|
||||
z-ai/glm-4-32b | paid
|
||||
z-ai/glm-4.5 | paid
|
||||
z-ai/glm-4.5-air | paid
|
||||
z-ai/glm-4.5-air:free | free
|
||||
OpenAI models:
|
||||
@ -408,8 +404,6 @@ export interface IKBotOptions {
|
||||
o1-2024-12-17
|
||||
o1-mini
|
||||
o1-mini-2024-09-12
|
||||
o1-preview
|
||||
o1-preview-2024-09-12
|
||||
o1-pro
|
||||
o1-pro-2025-03-19
|
||||
o3-mini
packages/kbot/dist/main_node.js (vendored, 11728 changed lines)
File diff suppressed because one or more lines are too long
packages/kbot/dist/package-lock.json (generated, vendored, 4 changed lines)
@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
"version": "1.1.46",
"version": "1.1.47",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
"version": "1.1.46",
"version": "1.1.47",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"
packages/kbot/dist/package.json (vendored, 2 changed lines)
@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
"version": "1.1.46",
"version": "1.1.47",
"main": "main_node.js",
"author": "",
"license": "ISC",
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -14,17 +14,24 @@ export function createHandlers(options: IKBotOptions): IHandler[] {
new TextHandler(),
]
}
import { isFile } from '@polymech/commons';

export async function detectAndHandle(content: Buffer | string, options: IKBotOptions): Promise<string> {
const handlers = createHandlers(options)
// Check if content is a file path
const contentStr = content.toString();
if (exists(contentStr)) {
const filePath = path.resolve(contentStr);
for (const handler of handlers) {
if (await handler.canHandle(filePath, true)) {
return handler.handle(filePath, true);
const handlers = createHandlers(options)
const contentStr = content.toString().trim();
try {
if (isFile(contentStr) && exists(contentStr)) {
const filePath = path.resolve(contentStr);
for (const handler of handlers) {
if (await handler.canHandle(filePath, true)) {
return handler.handle(filePath, true);
}
}
}
} catch (e) {
// options.logger?.warn(`Malformed path detected. Treating as content. Error: ${e.message}`);
// If it's not a valid path, it will throw an error, so we just continue
console.error(`Malformed path detected. Treating as content. Error: ${e.message}`);
}

// Handle as raw content
@ -33,5 +40,6 @@ export async function detectAndHandle(content: Buffer | string, options: IKBotOp
return handler.handle(content, false);
}
}

throw new Error('No suitable handler found for the input content');
}
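A minimal usage sketch for the revised detectAndHandle, assuming the export and behaviour shown in the hunk above; the import paths and the IKBotOptions location are assumptions, not verified.

    import { detectAndHandle } from './commands/handlers';   // path assumed from this repo layout
    import type { IKBotOptions } from './types';              // assumed location of IKBotOptions

    async function demo(options: IKBotOptions): Promise<void> {
      // A string that resolves to an existing file is routed to a file-capable handler...
      const fromFile = await detectAndHandle('./notes/prompt.md', options);
      // ...while anything else (including malformed paths that make isFile/exists throw)
      // falls through to the raw-content handlers.
      const fromText = await detectAndHandle('Summarise the attached notes.', options);
      console.log(fromFile.length, fromText.length);
    }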
@ -24,6 +24,7 @@ export const onCompletion = async (result: any = "", options: IKBotTask) => {
writeOrAppend(dstPath, result, options.append as E_AppendModeType)
const action = options.append ? `${options.append} content to` : 'Wrote completion result to'
options.logger?.debug(`${action} ${dstPath} : ${options.dst}`)
console.log(`wrote ${action} ${dstPath} : ${options.dst}`)
} else {
marked.use(markedTerminal({
emoji: false,
@ -3,6 +3,7 @@ import { lookup } from 'mime-types'
import { cwd as processCwd } from 'node:process'

import { hasMagic } from 'glob'
import { lstatSync } from 'node:fs';
import { sync as dir } from '@polymech/fs/dir'
import { sync as exists } from '@polymech/fs/exists'
import { sync as write } from '@polymech/fs/write'
@ -116,10 +117,9 @@ export const complete_messages = async (
let chatMessages: Array<ChatCompletionMessageParam> = []

const promptMessage = await prompt(opts)
if (!promptMessage?.content) {
return { messages: [], files: [] }
if (promptMessage?.content) {
chatMessages.push(promptMessage as ChatCompletionMessageParam)
}
chatMessages.push(promptMessage as ChatCompletionMessageParam)
chatMessages.push((await preferences(opts)) as ChatCompletionMessageParam)

const projectPath = path.resolve(options.path || '.')
@ -383,6 +383,16 @@ export const run = async (opts: IKBotTask): Promise<ProcessRunResult[]> => {
opts.logger.info(`Processing ${items.length} items matching pattern ${opts.each}...`)
const _models = all()
for (const item of items) {
try {
const stats = lstatSync(item);
if (!stats.isFile()) {
opts.logger.warn(`Item from --each is not a file, skipping: ${item}`);
continue;
}
} catch (e) {
opts.logger.warn(`Item from --each is not a valid path, skipping: ${item}`);
continue;
}
const itemOpts = {
...opts,
ITEM: item,
@ -395,7 +405,8 @@ export const run = async (opts: IKBotTask): Promise<ProcessRunResult[]> => {

let currentItemSpecificIncludes = [forward_slash(item)];

itemOpts.include = [...opts.include, ...currentItemSpecificIncludes];
const initialIncludes = [...opts.include]; // Store original includes
itemOpts.include = [...initialIncludes, ...currentItemSpecificIncludes];

const result = await processRun(itemOpts)
if (result !== undefined) {
packages/kbot/src/models/cache/openai-models.ts (vendored, 19 changed lines)
@ -2,11 +2,11 @@ export enum E_OPENAI_MODEL {
|
||||
MODEL_GPT_4_0613 = "gpt-4-0613",
|
||||
MODEL_GPT_4 = "gpt-4",
|
||||
MODEL_GPT_3_5_TURBO = "gpt-3.5-turbo",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26 = "o4-mini-deep-research-2025-06-26",
|
||||
MODEL_CODEX_MINI_LATEST = "codex-mini-latest",
|
||||
MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03 = "gpt-4o-realtime-preview-2025-06-03",
|
||||
MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03 = "gpt-4o-audio-preview-2025-06-03",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research",
|
||||
MODEL_GPT_5_NANO = "gpt-5-nano",
|
||||
MODEL_GPT_5 = "gpt-5",
|
||||
MODEL_GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07",
|
||||
MODEL_GPT_5_MINI = "gpt-5-mini",
|
||||
MODEL_GPT_5_NANO_2025_08_07 = "gpt-5-nano-2025-08-07",
|
||||
MODEL_DAVINCI_002 = "davinci-002",
|
||||
MODEL_BABBAGE_002 = "babbage-002",
|
||||
MODEL_GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct",
|
||||
@ -59,7 +59,9 @@ export enum E_OPENAI_MODEL {
|
||||
MODEL_O1_PRO_2025_03_19 = "o1-pro-2025-03-19",
|
||||
MODEL_O1_PRO = "o1-pro",
|
||||
MODEL_GPT_4O_MINI_TTS = "gpt-4o-mini-tts",
|
||||
MODEL_O3_2025_04_16 = "o3-2025-04-16",
|
||||
MODEL_O4_MINI_2025_04_16 = "o4-mini-2025-04-16",
|
||||
MODEL_O3 = "o3",
|
||||
MODEL_O4_MINI = "o4-mini",
|
||||
MODEL_GPT_4_1_2025_04_14 = "gpt-4.1-2025-04-14",
|
||||
MODEL_GPT_4_1 = "gpt-4.1",
|
||||
@ -68,6 +70,13 @@ export enum E_OPENAI_MODEL {
|
||||
MODEL_GPT_4_1_NANO_2025_04_14 = "gpt-4.1-nano-2025-04-14",
|
||||
MODEL_GPT_4_1_NANO = "gpt-4.1-nano",
|
||||
MODEL_GPT_IMAGE_1 = "gpt-image-1",
|
||||
MODEL_CODEX_MINI_LATEST = "codex-mini-latest",
|
||||
MODEL_GPT_4O_REALTIME_PREVIEW_2025_06_03 = "gpt-4o-realtime-preview-2025-06-03",
|
||||
MODEL_GPT_4O_AUDIO_PREVIEW_2025_06_03 = "gpt-4o-audio-preview-2025-06-03",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH = "o4-mini-deep-research",
|
||||
MODEL_O4_MINI_DEEP_RESEARCH_2025_06_26 = "o4-mini-deep-research-2025-06-26",
|
||||
MODEL_GPT_5_CHAT_LATEST = "gpt-5-chat-latest",
|
||||
MODEL_GPT_5_2025_08_07 = "gpt-5-2025-08-07",
|
||||
MODEL_GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k",
|
||||
MODEL_TTS_1 = "tts-1",
|
||||
MODEL_WHISPER_1 = "whisper-1",
|
||||
|
||||
@ -1,5 +1,5 @@
export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_OPENROUTER_HORIZON_BETA = "openrouter/horizon-beta",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free",
MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free",
MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
@ -1,8 +1,12 @@
|
||||
export enum E_OPENROUTER_MODEL {
|
||||
MODEL_OPENAI_GPT_5_CHAT = "openai/gpt-5-chat",
|
||||
MODEL_OPENAI_GPT_5 = "openai/gpt-5",
|
||||
MODEL_OPENAI_GPT_5_MINI = "openai/gpt-5-mini",
|
||||
MODEL_OPENAI_GPT_5_NANO = "openai/gpt-5-nano",
|
||||
MODEL_OPENAI_GPT_OSS_120B = "openai/gpt-oss-120b",
|
||||
MODEL_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
|
||||
MODEL_OPENAI_GPT_OSS_20B = "openai/gpt-oss-20b",
|
||||
MODEL_ANTHROPIC_CLAUDE_OPUS_4_1 = "anthropic/claude-opus-4.1",
|
||||
MODEL_OPENROUTER_HORIZON_BETA = "openrouter/horizon-beta",
|
||||
MODEL_MISTRALAI_CODESTRAL_2508 = "mistralai/codestral-2508",
|
||||
MODEL_QWEN_QWEN3_30B_A3B_INSTRUCT_2507 = "qwen/qwen3-30b-a3b-instruct-2507",
|
||||
MODEL_Z_AI_GLM_4_5 = "z-ai/glm-4.5",
|
||||
@ -123,8 +127,6 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct",
|
||||
MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it",
|
||||
MODEL_AI21_JAMBA_1_6_LARGE = "ai21/jamba-1.6-large",
|
||||
MODEL_AI21_JAMBA_1_6_MINI = "ai21/jamba-1.6-mini",
|
||||
MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_3_12B_IT = "google/gemma-3-12b-it",
|
||||
MODEL_COHERE_COMMAND_A = "cohere/command-a",
|
||||
@ -207,9 +209,9 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct",
|
||||
MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b",
|
||||
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
|
||||
@ -225,20 +227,20 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
|
||||
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
|
||||
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
|
||||
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
|
||||
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
|
||||
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
|
||||
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b",
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b",
|
||||
@ -246,14 +248,14 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
|
||||
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
|
||||
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
|
||||
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
|
||||
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
|
||||
MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it",
|
||||
MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
|
||||
MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it",
|
||||
@ -263,29 +265,28 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
|
||||
MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
|
||||
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
|
||||
MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
|
||||
MODEL_OPENAI_GPT_4O = "openai/gpt-4o",
|
||||
MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended",
|
||||
MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2",
|
||||
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
|
||||
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
|
||||
MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b",
|
||||
MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
|
||||
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
|
||||
MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
|
||||
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
|
||||
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
|
||||
MODEL_COHERE_COMMAND = "cohere/command",
|
||||
MODEL_COHERE_COMMAND_R = "cohere/command-r",
|
||||
MODEL_COHERE_COMMAND = "cohere/command",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta",
|
||||
@ -301,17 +302,16 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
|
||||
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
|
||||
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
|
||||
MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b",
|
||||
MODEL_OPENROUTER_AUTO = "openrouter/auto",
|
||||
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
|
||||
MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
|
||||
MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b",
|
||||
MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k",
|
||||
MODEL_MANCER_WEAVER = "mancer/weaver",
|
||||
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
|
||||
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
|
||||
MODEL_OPENAI_GPT_4 = "openai/gpt-4",
|
||||
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
|
||||
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314"
|
||||
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
|
||||
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
|
||||
}
|
||||
@ -88,7 +88,7 @@ export async function get(
ROUTER: options.router || 'unknown_router'
}
const potentialDstPath = path.resolve(resolvePath(options.dst, false, fullVarsForDst));
if (exists(potentialDstPath)) {
if (exists(potentialDstPath) && options.append !== 'replace') {
options.logger?.info(`Skipping source file ${path.relative(projectPath, absoluteSrcFilePath)} as output ${potentialDstPath} already exists.`);
} else {
filesToKeepAfterDstCheck.push(absoluteSrcFilePath);
@ -40,20 +40,25 @@ export function merge(existingContent: string, newContent: string): string {
export function writeOrAppend(
dstPath: string,
content: string,
appendMode?: 'concat' | 'merge'
appendMode?: 'concat' | 'merge' | 'replace'
): string {
if (appendMode === 'replace') {
write(dstPath, content);
return content;
}
let finalContent = content;
if (exists(dstPath) && appendMode) {
const existingContentBuffer = read(dstPath); // Read returns Buffer | undefined
if (existingContentBuffer) { // Check if read was successful
const existingContent = existingContentBuffer.toString();
if (appendMode === 'concat') {
finalContent = concat(existingContent, content);
finalContent = concat(existingContent, content);
} else if (appendMode === 'merge') {
finalContent = merge(existingContent, content); // Using placeholder merge for now
finalContent = merge(existingContent, content);
}
}
}
write(dstPath, finalContent)
return finalContent

write(dstPath, finalContent);
return finalContent;
}
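A minimal sketch of how the three append modes behave with writeOrAppend as shown above; the relative import path is assumed from the merge.ts sourcemap earlier in this commit.

    import { writeOrAppend } from './utils/merge';   // path assumed, adjust to the caller's location

    // 'concat' appends to an existing file, 'merge' tries to combine structures,
    // and the new 'replace' mode overwrites the destination unconditionally.
    writeOrAppend('out.md', 'first run', 'concat');     // creates out.md
    writeOrAppend('out.md', 'second run', 'concat');    // appends below the first result
    writeOrAppend('out.md', 'fresh result', 'replace'); // out.md now contains only 'fresh result'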
@ -41,9 +41,10 @@ export const EType = z.enum([
])

// Define the new enum for append modes
export const E_AppendMode = z.enum(['concat', 'merge'])
export const E_AppendMode = z.enum(['concat', 'merge', 'replace'])
export type E_AppendModeType = z.infer<typeof E_AppendMode>

// Define the new enum for wrap modes
export const E_WrapMode = z.enum(['meta', 'none'])
export type E_WrapModeType = z.infer<typeof E_WrapMode>
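A small sketch of validating the extended append mode with the zod enum above (enum values taken from the diff; only the standard zod API is used).

    import { z } from 'zod';

    const E_AppendMode = z.enum(['concat', 'merge', 'replace']);
    type E_AppendModeType = z.infer<typeof E_AppendMode>;

    // parse returns the narrowed literal type and throws on any other string
    const mode: E_AppendModeType = E_AppendMode.parse('replace');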
@ -8,7 +8,7 @@ export interface IKBotOptions {
/** Optional destination path for the result, will substitute ${MODEL_NAME} and ${ROUTER} in the path. Optional, used for "completion" mode */
dst?: string | undefined;
/** How to handle output if --dst file already exists: "concat" (append) or "merge" (try to merge structures if possible, otherwise append). Only used if --dst is specified. */
append?: ("concat" | "merge") | undefined;
append?: ("concat" | "merge" | "replace") | undefined;
/** Specify how to wrap the output, "meta (file name, absolute path, cwd)" or "none". */
wrap?: "meta" | "none";
/** Iterate over items, supported: GLOB | Path to JSON File | array of strings (comma separated). To test different models, use --each="gpt-3.5-turbo,gpt-4o", the actual string will exposed as variable `ITEM`, eg: --dst="${ITEM}-output.md" */
@ -31,7 +31,6 @@ export interface IKBotOptions {
|
||||
OpenRouter models:
|
||||
01-ai/yi-large | paid
|
||||
agentica-org/deepcoder-14b-preview | paid
|
||||
agentica-org/deepcoder-14b-preview:free | free
|
||||
ai21/jamba-1.6-large | paid
|
||||
@ -47,10 +46,8 @@ export interface IKBotOptions {
|
||||
anthropic/claude-3-haiku:beta | paid
|
||||
anthropic/claude-3-opus | paid
|
||||
anthropic/claude-3-opus:beta | paid
|
||||
anthropic/claude-3-sonnet | paid
|
||||
anthropic/claude-3.5-haiku | paid
|
||||
anthropic/claude-3.5-haiku-20241022 | paid
|
||||
anthropic/claude-3.5-haiku-20241022:beta | paid
|
||||
anthropic/claude-3.5-haiku:beta | paid
|
||||
anthropic/claude-3.5-sonnet | paid
|
||||
anthropic/claude-3.5-sonnet-20240620 | paid
|
||||
@ -60,6 +57,7 @@ export interface IKBotOptions {
|
||||
anthropic/claude-3.7-sonnet:beta | paid
|
||||
anthropic/claude-3.7-sonnet:thinking | paid
|
||||
anthropic/claude-opus-4 | paid
|
||||
anthropic/claude-opus-4.1 | paid
|
||||
anthropic/claude-sonnet-4 | paid
|
||||
arcee-ai/coder-large | paid
|
||||
arcee-ai/maestro-reasoning | paid
|
||||
@ -99,11 +97,11 @@ export interface IKBotOptions {
|
||||
deepseek/deepseek-r1-distill-qwen-32b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-7b | paid
|
||||
cognitivecomputations/dolphin-mixtral-8x22b | paid
|
||||
cognitivecomputations/dolphin3.0-mistral-24b | paid
|
||||
cognitivecomputations/dolphin3.0-mistral-24b:free | free
|
||||
cognitivecomputations/dolphin3.0-r1-mistral-24b | paid
|
||||
cognitivecomputations/dolphin3.0-r1-mistral-24b:free | free
|
||||
eleutherai/llemma_7b | paid
|
||||
eva-unit-01/eva-qwen-2.5-72b | paid
|
||||
sao10k/fimbulvetr-11b-v2 | paid
|
||||
alpindale/goliath-120b | paid
|
||||
google/gemini-flash-1.5 | paid
|
||||
@ -131,6 +129,7 @@ export interface IKBotOptions {
|
||||
google/gemma-3n-e2b-it:free | free
|
||||
google/gemma-3n-e4b-it | paid
|
||||
google/gemma-3n-e4b-it:free | free
|
||||
openrouter/horizon-beta | paid
|
||||
inception/mercury | paid
|
||||
inception/mercury-coder | paid
|
||||
infermatic/mn-inferor-12b | paid
|
||||
@ -177,10 +176,10 @@ export interface IKBotOptions {
|
||||
mistralai/mistral-large | paid
|
||||
mistralai/mistral-large-2407 | paid
|
||||
mistralai/mistral-large-2411 | paid
|
||||
nothingiisreal/mn-celeste-12b | paid
|
||||
mistralai/mistral-small | paid
|
||||
mistralai/mistral-tiny | paid
|
||||
mistralai/codestral-2501 | paid
|
||||
mistralai/codestral-2508 | paid
|
||||
mistralai/devstral-medium | paid
|
||||
mistralai/devstral-small | paid
|
||||
mistralai/devstral-small-2505 | paid
|
||||
@ -213,7 +212,6 @@ export interface IKBotOptions {
|
||||
moonshotai/kimi-vl-a3b-thinking:free | free
|
||||
moonshotai/kimi-k2 | paid
|
||||
moonshotai/kimi-k2:free | free
|
||||
morph/morph-v2 | paid
|
||||
morph/morph-v3-fast | paid
|
||||
morph/morph-v3-large | paid
|
||||
gryphe/mythomax-l2-13b | paid
|
||||
@ -232,6 +230,8 @@ export interface IKBotOptions {
|
||||
nvidia/llama-3.3-nemotron-super-49b-v1 | paid
|
||||
openai/chatgpt-4o-latest | paid
|
||||
openai/codex-mini | paid
|
||||
openai/gpt-oss-120b | paid
|
||||
openai/gpt-oss-20b | paid
|
||||
openai/gpt-3.5-turbo | paid
|
||||
openai/gpt-3.5-turbo-0613 | paid
|
||||
openai/gpt-3.5-turbo-16k | paid
|
||||
@ -256,8 +256,6 @@ export interface IKBotOptions {
|
||||
openai/o1 | paid
|
||||
openai/o1-mini | paid
|
||||
openai/o1-mini-2024-09-12 | paid
|
||||
openai/o1-preview | paid
|
||||
openai/o1-preview-2024-09-12 | paid
|
||||
openai/o1-pro | paid
|
||||
openai/o3 | paid
|
||||
openai/o3-mini | paid
|
||||
@ -290,16 +288,15 @@ export interface IKBotOptions {
|
||||
qwen/qwen3-235b-a22b | paid
|
||||
qwen/qwen3-235b-a22b:free | free
|
||||
qwen/qwen3-235b-a22b-2507 | paid
|
||||
qwen/qwen3-235b-a22b-2507:free | free
|
||||
qwen/qwen3-235b-a22b-thinking-2507 | paid
|
||||
qwen/qwen3-30b-a3b | paid
|
||||
qwen/qwen3-30b-a3b:free | free
|
||||
qwen/qwen3-30b-a3b-instruct-2507 | paid
|
||||
qwen/qwen3-32b | paid
|
||||
qwen/qwen3-4b:free | free
|
||||
qwen/qwen3-8b | paid
|
||||
qwen/qwen3-8b:free | free
|
||||
qwen/qwen3-coder | paid
|
||||
qwen/qwen3-coder:free | free
|
||||
qwen/qwq-32b | paid
|
||||
qwen/qwq-32b:free | free
|
||||
qwen/qwq-32b-preview | paid
|
||||
@ -308,14 +305,12 @@ export interface IKBotOptions {
|
||||
qwen/qwen-2.5-7b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct:free | free
|
||||
rekaai/reka-flash-3 | paid
|
||||
rekaai/reka-flash-3:free | free
|
||||
undi95/remm-slerp-l2-13b | paid
|
||||
sao10k/l3-lunaris-8b | paid
|
||||
sao10k/l3-euryale-70b | paid
|
||||
sao10k/l3.1-euryale-70b | paid
|
||||
sao10k/l3.3-euryale-70b | paid
|
||||
sarvamai/sarvam-m | paid
|
||||
sarvamai/sarvam-m:free | free
|
||||
shisa-ai/shisa-v2-llama3.3-70b | paid
|
||||
shisa-ai/shisa-v2-llama3.3-70b:free | free
|
||||
@ -330,12 +325,10 @@ export interface IKBotOptions {
|
||||
thedrummer/unslopnemo-12b | paid
|
||||
thedrummer/valkyrie-49b-v1 | paid
|
||||
thudm/glm-4-32b | paid
|
||||
thudm/glm-4-32b:free | free
|
||||
thudm/glm-4.1v-9b-thinking | paid
|
||||
thudm/glm-z1-32b | paid
|
||||
thudm/glm-z1-32b:free | free
|
||||
tngtech/deepseek-r1t-chimera | paid
|
||||
tngtech/deepseek-r1t-chimera:free | free
|
||||
tngtech/deepseek-r1t2-chimera | paid
|
||||
tngtech/deepseek-r1t2-chimera:free | free
|
||||
undi95/toppy-m-7b | paid
|
||||
scb10x/llama3.1-typhoon2-70b-instruct | paid
|
||||
@ -350,6 +343,9 @@ export interface IKBotOptions {
|
||||
x-ai/grok-4 | paid
|
||||
x-ai/grok-vision-beta | paid
|
||||
z-ai/glm-4-32b | paid
|
||||
z-ai/glm-4.5 | paid
|
||||
z-ai/glm-4.5-air | paid
|
||||
z-ai/glm-4.5-air:free | free
|
||||
OpenAI models:
|
||||
@ -408,8 +404,6 @@ export interface IKBotOptions {
|
||||
o1-2024-12-17
|
||||
o1-mini
|
||||
o1-mini-2024-09-12
|
||||
o1-preview
|
||||
o1-preview-2024-09-12
|
||||
o1-pro
|
||||
o1-pro-2025-03-19
|
||||
o3-mini