maintenance love :)
parent eb12d74af8
commit ad00911d37
@@ -1,3 +1,3 @@
import OpenAI from 'openai';
import { IKBotTask } from '@polymech/ai-tools';
export declare const runResponses: (client: OpenAI, params: any, options: IKBotTask) => Promise<string | false>;
export declare const runResponses: (client: OpenAI, params: any, options: IKBotTask) => Promise<any>;

@@ -11,37 +11,47 @@ export const runResponses = async (client, params, options) => {
.join('\n\n');
const tools = [
{
type: "web_search",
user_location: {
type: "approximate",
country: "ES",
city: "Sentmenant",
region: "Barcelona",
},
},
// { type: "web_search_preview" },
type: 'web_search'
}
];
let format = null;
if (exists(options.format)) {
const content = readFS(options.format);
format = JSON.parse(content.toString());
const createParams = {
model: options.model,
input,
stream: false,
parallel_tool_calls: false,
tools
};
if (options.stream) {
createParams.stream = true;
}
try {
const response = await client.responses.create({
model: options.model,
input,
stream: false,
parallel_tool_calls: false,
tools: tools,
text: {
if (options.format && typeof options.format === 'string' && exists(options.format)) {
try {
const content = readFS(options.format);
const schema = JSON.parse(content.toString());
createParams.text = {
format: {
type: "json_schema",
name: "format",
schema: format,
type: 'json_schema',
name: 'format',
schema,
strict: false
}
};
}
catch (e) {
options.logger?.error(`Failed to parse format file: ${options.format}`, e);
}
}
try {
const response = await client.responses.create(createParams);
if (options.stream) {
const allEvents = [];
for await (const event of response) {
allEvents.push({ event: event.type, data: event });
}
});
//writeFileSync('./tests/research-stream.json', JSON.stringify(allEvents, null, 2))
//options.logger?.info('Stream logged to ./tests/research-stream.json')
return;
}
if (!response || !response.output_text) {
return '';
}
@@ -50,7 +60,8 @@ export const runResponses = async (client, params, options) => {
return result;
}
catch (e) {
options.logger?.error(`Error running responses mode: ${e.message}`, e.stack);
options.logger?.error('Failed to create response:', e.message);
return;
}
};
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLXJlc3BvbnNlcy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9jb21tYW5kcy9ydW4tcmVzcG9uc2VzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUdBLE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDbEQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUNwRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFDbEQsTUFBTSxDQUFDLE1BQU0sWUFBWSxHQUFHLEtBQUssRUFBRSxNQUFjLEVBQUUsTUFBVyxFQUFFLE9BQWtCLEVBQUUsRUFBRTtJQUNsRixJQUFJLE9BQU8sQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUNkLE9BQU8sQ0FBQyxNQUFNLEVBQUUsSUFBSSxDQUFDLDZCQUE2QixDQUFDLENBQUE7UUFDbkQsT0FBTyxLQUFLLENBQUE7SUFDaEIsQ0FBQztJQUVELE1BQU0sS0FBSyxHQUFHLE1BQU0sQ0FBQyxRQUFRO1NBQ3hCLEdBQUcsQ0FBQyxDQUFDLENBQTZCLEVBQUUsRUFBRSxDQUFDLENBQUMsQ0FBQyxPQUFPLENBQUM7U0FDakQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBRWpCLE1BQU0sS0FBSyxHQUFHO1FBQ1Y7WUFDSSxJQUFJLEVBQUUsWUFBWTtZQUNsQixhQUFhLEVBQUU7Z0JBQ1gsSUFBSSxFQUFFLGFBQWE7Z0JBQ25CLE9BQU8sRUFBRSxJQUFJO2dCQUNiLElBQUksRUFBRSxZQUFZO2dCQUNsQixNQUFNLEVBQUUsV0FBVzthQUN0QjtTQUNKO1FBQ0Qsa0NBQWtDO0tBQ3JDLENBQUE7SUFFRCxJQUFJLE1BQU0sR0FBRyxJQUFJLENBQUM7SUFFbEIsSUFBSSxNQUFNLENBQUMsT0FBTyxDQUFDLE1BQU0sQ0FBQyxFQUFFLENBQUM7UUFDekIsTUFBTSxPQUFPLEdBQUcsTUFBTSxDQUFDLE9BQU8sQ0FBQyxNQUFNLENBQUMsQ0FBQztRQUN2QyxNQUFNLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsUUFBUSxFQUFFLENBQUMsQ0FBQztJQUM1QyxDQUFDO0lBRUQsSUFBSSxDQUFDO1FBQ0QsTUFBTSxRQUFRLEdBQUcsTUFBTSxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sQ0FBQztZQUMzQyxLQUFLLEVBQUUsT0FBTyxDQUFDLEtBQUs7WUFDcEIsS0FBSztZQUNMLE1BQU0sRUFBRSxLQUFLO1lBQ2IsbUJBQW1CLEVBQUUsS0FBSztZQUMxQixLQUFLLEVBQUUsS0FBWTtZQUNuQixJQUFJLEVBQUU7Z0JBQ0YsTUFBTSxFQUFFO29CQUNKLElBQUksRUFBRSxhQUFhO29CQUNuQixJQUFJLEVBQUUsUUFBUTtvQkFDZCxNQUFNLEVBQUUsTUFBTTtvQkFDZCxNQUFNLEVBQUUsS0FBSztpQkFDaEI7YUFDSjtTQUNKLENBQUMsQ0FBQTtRQUVGLElBQUksQ0FBQyxRQUFRLElBQUksQ0FBQyxRQUFRLENBQUMsV0FBVyxFQUFFLENBQUM7WUFDckMsT0FBTyxFQUFFLENBQUE7UUFDYixDQUFDO1FBRUQsSUFBSSxNQUFNLEdBQUcsUUFBUSxDQUFDLFdBQVcsQ0FBQTtRQUNqQyxNQUFNLEdBQUcsTUFBTSxZQUFZLENBQUMsTUFBTSxFQUFFLE9BQU8sQ0FBQyxDQUFBO1FBQzVDLE9BQU8sTUFBTSxDQUFBO0lBQ2pCLENBQUM7SUFBQyxPQUFPLENBQUMsRUFBRSxDQUFDO1FBQ1QsT0FBTyxDQUFDLE1BQU0sRUFBRSxLQUFLLENBQUMsaUNBQWlDLENBQUMsQ0FBQyxPQUFPLEVBQUUsRUFBRSxDQUFDLENBQUMsS0FBSyxDQUFDLENBQUE7SUFDaEYsQ0FBQztBQUNMLENBQUMsQ0FBQSJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicnVuLXJlc3BvbnNlcy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9jb21tYW5kcy9ydW4tcmVzcG9uc2VzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUdBLE9BQU8sRUFBRSxJQUFJLElBQUksTUFBTSxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDbEQsT0FBTyxFQUFFLElBQUksSUFBSSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQTtBQUdwRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0scUJBQXFCLENBQUE7QUFFbEQsTUFBTSxDQUFDLE1BQU0sWUFBWSxHQUFHLEtBQUssRUFBRSxNQUFjLEVBQUUsTUFBVyxFQUFFLE9BQWtCLEVBQUUsRUFBRTtJQUNsRixJQUFJLE9BQU8sQ0FBQyxHQUFHLEVBQUUsQ0FBQztRQUNkLE9BQU8sQ0FBQyxNQUFNLEVBQUUsSUFBSSxDQUFDLDZCQUE2QixDQUFDLENBQUE7UUFDbkQsT0FBTyxLQUFLLENBQUE7SUFDaEIsQ0FBQztJQUdELE1BQU0sS0FBSyxHQUFHLE1BQU0sQ0FBQyxRQUFRO1NBQ3hCLEdBQUcsQ0FBQyxDQUFDLENBQTZCLEVBQUUsRUFBRSxDQUFDLENBQUMsQ0FBQyxPQUFPLENBQUM7U0FDakQsSUFBSSxDQUFDLE1BQU0sQ0FBQyxDQUFBO0lBRWpCLE1BQU0sS0FBSyxHQUFHO1FBQ1Y7WUFDSSxJQUFJLEVBQUUsWUFBcUI7U0FDOUI7S0FDSixDQUFBO0lBRUQsTUFBTSxZQUFZLEdBQVE7UUFDdEIsS0FBSyxFQUFFLE9BQU8sQ0FBQyxLQUFLO1FBQ3BCLEtBQUs7UUFDTCxNQUFNLEVBQUUsS0FBSztRQUNiLG1CQUFtQixFQUFFLEtBQUs7UUFDMUIsS0FBSztLQUNSLENBQUE7SUFFRCxJQUFJLE9BQU8sQ0FBQyxNQUFNLEVBQUUsQ0FBQztRQUNqQixZQUFZLENBQUMsTUFBTSxHQUFHLElBQUksQ0FBQTtJQUM5QixDQUFDO0lBRUQsSUFBSSxPQUFPLENBQUMsTUFBTSxJQUFJLE9BQU8sT0FBTyxDQUFDLE1BQU0sS0FBSyxRQUFRLElBQUksTUFBTSxDQUFDLE9BQU8sQ0FBQyxNQUFNLENBQUMsRUFBRSxDQUFDO1FBQ2pGLElBQUksQ0FBQztZQUNELE1BQU0sT0FBTyxHQUFHLE1BQU0sQ0FBQyxPQUFPLENBQUMsTUFBTSxDQUFDLENBQUE7WUFDdEMsTUFBTSxNQUFNLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsUUFBUSxFQUFFLENBQUMsQ0FBQTtZQUM3QyxZQUFZLENBQUMsSUFBSSxHQUFHO2dCQUNoQixNQUFNLEVBQUU7b0JBQ0osSUFBSSxFQUFFLGFBQWE7b0JBQ25CLElBQUksRUFBRSxRQUFRO29CQUNkLE1BQU07b0JBQ04sTUFBTSxFQUFFLEtBQUs7aUJBQ2hCO2FBQ0osQ0FBQTtRQUNMLENBQUM7UUFBQyxPQUFPLENBQUMsRUFBRSxDQUFDO1lBQ1QsT0FBTyxDQUFDLE1BQU0sRUFBRSxLQUFLLENBQUMsZ0NBQWdDLE9BQU8sQ0FBQyxNQUFNLEVBQUUsRUFBRSxDQUFDLENBQUMsQ0FBQTtRQUM5RSxDQUFDO0lBQ0wsQ0FBQztJQUVELElBQUksQ0FBQztRQUNELE1BQU0sUUFBUSxHQUFHLE1BQU8sTUFBYyxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsWUFBWSxDQUFDLENBQUE7UUFFckUsSUFBSSxPQUFPLENBQUMsTUFBTSxFQUFFLENBQUM7WUFDakIsTUFBTSxTQUFTLEdBQVUsRUFBRSxDQUFBO1lBQzNCLElBQUksS0FBSyxFQUFFLE1BQU0sS0FBSyxJQUFJLFFBQVEsRUFBRSxDQUFDO2dCQUNqQyxTQUFTLENBQUMsSUFBSSxDQUFDLEVBQUUsS0FBSyxFQUFFLEtBQUssQ0FBQyxJQUFJLEVBQUUsSUFBSSxFQUFFLEtBQUssRUFBRSxDQUFDLENBQUE7WUFDdEQsQ0FBQztZQUNELG1GQUFtRjtZQUNuRix1RUFBdUU7WUFDdkUsT0FBTTtRQUNWLENBQUM7UUFFRCxJQUFJLENBQUMsUUFBUSxJQUFJLENBQUMsUUFBUSxDQUFDLFdBQVcsRUFBRSxDQUFDO1lBQ3JDLE9BQU8sRUFBRSxDQUFBO1FBQ2IsQ0FBQztRQUVELElBQUksTUFBTSxHQUFHLFFBQVEsQ0FBQyxXQUFXLENBQUE7UUFDakMsTUFBTSxHQUFHLE1BQU0sWUFBWSxDQUFDLE1BQU0sRUFBRSxPQUFPLENBQUMsQ0FBQTtRQUM1QyxPQUFPLE1BQU0sQ0FBQTtJQUNqQixDQUFDO0lBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQztRQUNULE9BQU8sQ0FBQyxNQUFNLEVBQUUsS0FBSyxDQUFDLDRCQUE0QixFQUFFLENBQUMsQ0FBQyxPQUFPLENBQUMsQ0FBQTtRQUM5RCxPQUFNO0lBQ1YsQ0FBQztBQUNMLENBQUMsQ0FBQSJ9
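For readability, here is a consolidated sketch of the request object the updated code assembles before calling client.responses.create(), reconstructed from the added lines above. The buildCreateParams helper name is illustrative and not part of the repository.

// Sketch only: mirrors the added lines in run-responses.js / run-responses.ts.
// `buildCreateParams` is a hypothetical helper name used for illustration.
export const buildCreateParams = (
    model: string,
    input: string,
    schema?: Record<string, any>,
    stream = false
) => {
    const createParams: any = {
        model,
        input,                                    // message contents joined with '\n\n'
        stream,                                   // true when options.stream is set
        parallel_tool_calls: false,
        tools: [{ type: 'web_search' as const }]  // simplified tool entry (user_location removed)
    }
    if (schema) {
        // structured output: attach the JSON schema parsed from the --format file
        createParams.text = {
            format: { type: 'json_schema', name: 'format', schema, strict: false }
        }
    }
    return createParams
}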
@@ -1,5 +1,5 @@
{
"timestamp": 1751099810190,
"timestamp": 1751485210107,
"models": [
{
"id": "gpt-4-0613",
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
3 packages/kbot/dist-in/zod_types.d.ts (vendored)
@@ -444,6 +444,7 @@ export interface IKBotOptions {
completion, tools, assistant.
completion: no support for tools, please use --dst parameter to save the output.
tools: allows for tools to be used, eg 'save to ./output.md'. Not all models support this mode.
responses: allows for responses to be used, eg 'save to ./output.md'. Not all models support this mode.
assistant: : allows documents (PDF, DOCX, ...) to be added but dont support tools. Use --dst to save the output. Supported files :
custom: custom mode
*/
@@ -462,6 +463,8 @@ export interface IKBotOptions {
preferences?: string;
/** Logging directory */
logs?: string;
/** Enable streaming (verbose LLM output) */
stream?: boolean;
/** Environment (in profile) */
env?: string;
variables?: {
15477 packages/kbot/dist/main_node.js (vendored)
File diff suppressed because one or more lines are too long
4 packages/kbot/dist/package-lock.json (generated, vendored)
@@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
"version": "1.1.41",
"version": "1.1.42",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
"version": "1.1.41",
"version": "1.1.42",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"
2 packages/kbot/dist/package.json (vendored)
@@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
"version": "1.1.41",
"version": "1.1.42",
"main": "main_node.js",
"author": "",
"license": "ISC",
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -3,53 +3,68 @@ import { ChatCompletionMessageParam } from 'openai/resources/index.mjs'
import { IKBotTask } from '@polymech/ai-tools'
import { sync as readFS } from '@polymech/fs/read'
import { sync as exists } from '@polymech/fs/exists'
import { sync as writeFS } from '@polymech/fs/write'
import { writeFileSync } from 'node:fs'
import { onCompletion } from './run-completion.js'

export const runResponses = async (client: OpenAI, params: any, options: IKBotTask) => {
if (options.dry) {
options.logger?.info('Dry run - skipping API call')
return false
}

const input = params.messages
.map((m: ChatCompletionMessageParam) => m.content)
.join('\n\n')

const tools = [
{
type: "web_search",
user_location: {
type: "approximate",
country: "ES",
city: "Sentmenant",
region: "Barcelona",
},
},
// { type: "web_search_preview" },
type: 'web_search' as const
}
]

let format = null;

if (exists(options.format)) {
const content = readFS(options.format);
format = JSON.parse(content.toString());
const createParams: any = {
model: options.model,
input,
stream: false,
parallel_tool_calls: false,
tools
}

try {
const response = await client.responses.create({
model: options.model,
input,
stream: false,
parallel_tool_calls: false,
tools: tools as any,
text: {
if (options.stream) {
createParams.stream = true
}

if (options.format && typeof options.format === 'string' && exists(options.format)) {
try {
const content = readFS(options.format)
const schema = JSON.parse(content.toString())
createParams.text = {
format: {
type: "json_schema",
name: "format",
schema: format,
type: 'json_schema',
name: 'format',
schema,
strict: false
}
}
})
} catch (e) {
options.logger?.error(`Failed to parse format file: ${options.format}`, e)
}
}

try {
const response = await (client as any).responses.create(createParams)

if (options.stream) {
const allEvents: any[] = []
for await (const event of response) {
allEvents.push({ event: event.type, data: event })
}
//writeFileSync('./tests/research-stream.json', JSON.stringify(allEvents, null, 2))
//options.logger?.info('Stream logged to ./tests/research-stream.json')
return
}

if (!response || !response.output_text) {
return ''
@@ -59,6 +74,7 @@ export const runResponses = async (client: OpenAI, params: any, options: IKBotTask
result = await onCompletion(result, options)
return result
} catch (e) {
options.logger?.error(`Error running responses mode: ${e.message}`, e.stack)
options.logger?.error('Failed to create response:', e.message)
return
}
}
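A hedged usage sketch of the new signature follows; the options fields shown (model, format, stream, dry, logger) are the ones the diff reads, while the schema path, message text and import path are made-up examples.

// Usage sketch, not part of the commit. Paths and values are illustrative.
import OpenAI from 'openai'
import { runResponses } from './run-responses.js'

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })

const result = await runResponses(
    client,
    { messages: [{ role: 'user', content: 'List OLED displays suitable for an ESP-32.' }] },
    {
        model: 'gpt-4.1',
        format: './schemas/displays.json', // optional JSON schema file -> structured output
        stream: false,                     // true: events are collected and nothing is returned
        dry: false,
        logger: console
    } as any                               // IKBotTask has more fields than shown here
)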
@ -1,6 +1,6 @@
|
||||
export enum E_OPENROUTER_MODEL_FREE {
|
||||
MODEL_FREE_OPENROUTER_CYPHER_ALPHA_FREE = "openrouter/cypher-alpha:free",
|
||||
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_2_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.2-24b-instruct:free",
|
||||
MODEL_FREE_MINIMAX_MINIMAX_M1_EXTENDED = "minimax/minimax-m1:extended",
|
||||
MODEL_FREE_MOONSHOTAI_KIMI_DEV_72B_FREE = "moonshotai/kimi-dev-72b:free",
|
||||
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B_FREE = "deepseek/deepseek-r1-0528-qwen3-8b:free",
|
||||
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_0528_FREE = "deepseek/deepseek-r1-0528:free",
|
||||
@ -47,8 +47,8 @@ export enum E_OPENROUTER_MODEL_FREE {
|
||||
MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free",
|
||||
MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free",
|
||||
MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free",
|
||||
MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free",
|
||||
MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
|
||||
MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free",
|
||||
MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
export enum E_OPENROUTER_MODEL {
|
||||
MODEL_OPENROUTER_CYPHER_ALPHA_FREE = "openrouter/cypher-alpha:free",
|
||||
MODEL_BAIDU_ERNIE_4_5_300B_A47B = "baidu/ernie-4.5-300b-a47b",
|
||||
MODEL_THEDRUMMER_ANUBIS_70B_V1_1 = "thedrummer/anubis-70b-v1.1",
|
||||
MODEL_INCEPTION_MERCURY = "inception/mercury",
|
||||
MODEL_MORPH_MORPH_V2 = "morph/morph-v2",
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_2_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.2-24b-instruct:free",
|
||||
MODEL_MISTRALAI_MISTRAL_SMALL_3_2_24B_INSTRUCT = "mistralai/mistral-small-3.2-24b-instruct",
|
||||
MODEL_MINIMAX_MINIMAX_M1_EXTENDED = "minimax/minimax-m1:extended",
|
||||
MODEL_MINIMAX_MINIMAX_M1 = "minimax/minimax-m1",
|
||||
MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE_PREVIEW_06_17 = "google/gemini-2.5-flash-lite-preview-06-17",
|
||||
MODEL_GOOGLE_GEMINI_2_5_FLASH = "google/gemini-2.5-flash",
|
||||
@ -16,7 +18,6 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MAGISTRAL_MEDIUM_2506 = "mistralai/magistral-medium-2506",
|
||||
MODEL_MISTRALAI_MAGISTRAL_MEDIUM_2506_THINKING = "mistralai/magistral-medium-2506:thinking",
|
||||
MODEL_GOOGLE_GEMINI_2_5_PRO_PREVIEW = "google/gemini-2.5-pro-preview",
|
||||
MODEL_SENTIENTAGI_DOBBY_MINI_UNHINGED_PLUS_LLAMA_3_1_8B = "sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_7B = "deepseek/deepseek-r1-distill-qwen-7b",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B_FREE = "deepseek/deepseek-r1-0528-qwen3-8b:free",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_R1_0528_QWEN3_8B = "deepseek/deepseek-r1-0528-qwen3-8b",
|
||||
@ -43,7 +44,7 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_ARCEE_AI_VIRTUOSO_MEDIUM_V2 = "arcee-ai/virtuoso-medium-v2",
|
||||
MODEL_ARCEE_AI_ARCEE_BLITZ = "arcee-ai/arcee-blitz",
|
||||
MODEL_MICROSOFT_PHI_4_REASONING_PLUS = "microsoft/phi-4-reasoning-plus",
|
||||
MODEL_INCEPTION_MERCURY_CODER_SMALL_BETA = "inception/mercury-coder-small-beta",
|
||||
MODEL_INCEPTION_MERCURY_CODER = "inception/mercury-coder",
|
||||
MODEL_OPENGVLAB_INTERNVL3_14B = "opengvlab/internvl3-14b",
|
||||
MODEL_OPENGVLAB_INTERNVL3_2B = "opengvlab/internvl3-2b",
|
||||
MODEL_DEEPSEEK_DEEPSEEK_PROVER_V2 = "deepseek/deepseek-prover-v2",
|
||||
@ -194,34 +195,34 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b",
|
||||
MODEL_X_AI_GROK_BETA = "x-ai/grok-beta",
|
||||
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
|
||||
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
|
||||
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
|
||||
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
|
||||
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
|
||||
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
|
||||
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
|
||||
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
|
||||
MODEL_LIQUID_LFM_40B = "liquid/lfm-40b",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
|
||||
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
|
||||
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
|
||||
MODEL_LIQUID_LFM_40B = "liquid/lfm-40b",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
|
||||
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
|
||||
MODEL_OPENAI_O1_PREVIEW = "openai/o1-preview",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12",
|
||||
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
|
||||
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
|
||||
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
|
||||
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
|
||||
@ -231,17 +232,17 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b",
|
||||
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b",
|
||||
MODEL_OPENAI_CHATGPT_4O_LATEST = "openai/chatgpt-4o-latest",
|
||||
MODEL_AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b",
|
||||
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
|
||||
MODEL_AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b",
|
||||
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
|
||||
MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
|
||||
MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b",
|
||||
MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE = "perplexity/llama-3.1-sonar-small-128k-online",
|
||||
MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = "perplexity/llama-3.1-sonar-large-128k-online",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
|
||||
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
|
||||
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
|
||||
@ -256,31 +257,31 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
|
||||
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
|
||||
MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
|
||||
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
|
||||
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
|
||||
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
|
||||
MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5",
|
||||
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_OPENAI_GPT_4O = "openai/gpt-4o",
|
||||
MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended",
|
||||
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
|
||||
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
|
||||
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B = "neversleep/llama-3-lumimaid-8b",
|
||||
MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2",
|
||||
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
|
||||
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
|
||||
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
|
||||
MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b",
|
||||
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
|
||||
MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
|
||||
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
|
||||
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
|
||||
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
|
||||
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
|
||||
MODEL_COHERE_COMMAND_R = "cohere/command-r",
|
||||
MODEL_COHERE_COMMAND = "cohere/command",
|
||||
MODEL_COHERE_COMMAND_R = "cohere/command-r",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
|
||||
MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta",
|
||||
@ -297,10 +298,10 @@ export enum E_OPENROUTER_MODEL {
|
||||
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2",
|
||||
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
|
||||
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
|
||||
MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2",
|
||||
MODEL_ANTHROPIC_CLAUDE_2_1_BETA = "anthropic/claude-2.1:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_2_1 = "anthropic/claude-2.1",
|
||||
MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta",
|
||||
MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2",
|
||||
MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b",
|
||||
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
|
||||
MODEL_OPENROUTER_AUTO = "openrouter/auto",
|
||||
|
||||
@@ -176,6 +176,7 @@ export const OptionsSchema = (opts?: any): any => {
.describe(`Chat completion mode:\n\t completion, tools, assistant.
${chalk.green.bold('completion')}: no support for tools, please use --dst parameter to save the output.
${chalk.green.bold('tools')}: allows for tools to be used, eg 'save to ./output.md'. Not all models support this mode.
${chalk.green.bold('responses')}: allows for responses to be used, eg 'save to ./output.md'. Not all models support this mode.
${chalk.green.bold('assistant')}: : allows documents (PDF, DOCX, ...) to be added but dont support tools. Use --dst to save the output. Supported files :
${chalk.green.bold('custom')}: custom mode
`)
@@ -221,6 +222,11 @@ export const OptionsSchema = (opts?: any): any => {
z.string()
.default(LOGGING_DIRECTORY)
.describe('Logging directory')
).add(
'stream',
z.boolean()
.default(false)
.describe('Enable streaming (verbose LLM output)')
)
.add(
'env',
@ -33,13 +33,10 @@ export interface IKBotOptions {
|
||||
[35m[1m[22m[39m
|
||||
01-ai/yi-large | paid
|
||||
aetherwiing/mn-starcannon-12b | paid
|
||||
agentica-org/deepcoder-14b-preview:free | free
|
||||
ai21/jamba-1.6-large | paid
|
||||
ai21/jamba-1.6-mini | paid
|
||||
aion-labs/aion-1.0 | paid
|
||||
aion-labs/aion-1.0-mini | paid
|
||||
aion-labs/aion-rp-llama-3.1-8b | paid
|
||||
alfredpros/codellama-7b-instruct-solidity | paid
|
||||
ai21/jamba-1-5-large | paid
|
||||
ai21/jamba-1-5-mini | paid
|
||||
ai21/jamba-instruct | paid
|
||||
jondurbin/airoboros-l2-70b | paid
|
||||
amazon/nova-lite-v1 | paid
|
||||
amazon/nova-micro-v1 | paid
|
||||
amazon/nova-pro-v1 | paid
|
||||
@ -57,28 +54,14 @@ export interface IKBotOptions {
|
||||
anthropic/claude-3.5-sonnet-20240620 | paid
|
||||
anthropic/claude-3.5-sonnet-20240620:beta | paid
|
||||
anthropic/claude-3.5-sonnet:beta | paid
|
||||
anthropic/claude-3.7-sonnet | paid
|
||||
anthropic/claude-3.7-sonnet:beta | paid
|
||||
anthropic/claude-3.7-sonnet:thinking | paid
|
||||
anthropic/claude-opus-4 | paid
|
||||
anthropic/claude-sonnet-4 | paid
|
||||
anthropic/claude-2 | paid
|
||||
anthropic/claude-2:beta | paid
|
||||
anthropic/claude-2.0 | paid
|
||||
anthropic/claude-2.0:beta | paid
|
||||
anthropic/claude-2.1 | paid
|
||||
anthropic/claude-2.1:beta | paid
|
||||
arcee-ai/arcee-blitz | paid
|
||||
arcee-ai/caller-large | paid
|
||||
arcee-ai/coder-large | paid
|
||||
arcee-ai/maestro-reasoning | paid
|
||||
arcee-ai/spotlight | paid
|
||||
arcee-ai/virtuoso-large | paid
|
||||
arcee-ai/virtuoso-medium-v2 | paid
|
||||
arliai/qwq-32b-arliai-rpr-v1:free | free
|
||||
openrouter/auto | paid
|
||||
cohere/command | paid
|
||||
cohere/command-a | paid
|
||||
cohere/command-r | paid
|
||||
cohere/command-r-03-2024 | paid
|
||||
cohere/command-r-08-2024 | paid
|
||||
@ -86,81 +69,61 @@ export interface IKBotOptions {
|
||||
cohere/command-r-plus-04-2024 | paid
|
||||
cohere/command-r-plus-08-2024 | paid
|
||||
cohere/command-r7b-12-2024 | paid
|
||||
deepseek/deepseek-prover-v2 | paid
|
||||
deepseek/deepseek-r1-0528-qwen3-8b | paid
|
||||
deepseek/deepseek-r1-0528-qwen3-8b:free | free
|
||||
databricks/dbrx-instruct | paid
|
||||
deepseek/deepseek-chat-v2.5 | paid
|
||||
deepseek/deepseek-chat | paid
|
||||
deepseek/deepseek-chat:free | free
|
||||
deepseek/deepseek-chat-v3-0324 | paid
|
||||
deepseek/deepseek-chat-v3-0324:free | free
|
||||
deepseek/deepseek-v3-base:free | free
|
||||
deepseek/deepseek-r1 | paid
|
||||
deepseek/deepseek-r1:free | free
|
||||
deepseek/deepseek-r1-0528 | paid
|
||||
deepseek/deepseek-r1-0528:free | free
|
||||
deepseek/deepseek-r1-distill-llama-70b | paid
|
||||
deepseek/deepseek-r1-distill-llama-70b:free | free
|
||||
deepseek/deepseek-r1-distill-llama-8b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-1.5b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-14b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-14b:free | free
|
||||
deepseek/deepseek-r1-distill-qwen-32b | paid
|
||||
deepseek/deepseek-r1-distill-qwen-7b | paid
|
||||
cognitivecomputations/dolphin-mixtral-8x7b | paid
|
||||
cognitivecomputations/dolphin-mixtral-8x22b | paid
|
||||
cognitivecomputations/dolphin3.0-mistral-24b:free | free
|
||||
cognitivecomputations/dolphin3.0-r1-mistral-24b:free | free
|
||||
eleutherai/llemma_7b | paid
|
||||
eva-unit-01/eva-llama-3.33-70b | paid
|
||||
eva-unit-01/eva-qwen-2.5-32b | paid
|
||||
eva-unit-01/eva-qwen-2.5-72b | paid
|
||||
sao10k/fimbulvetr-11b-v2 | paid
|
||||
alpindale/goliath-120b | paid
|
||||
google/gemini-2.0-flash-thinking-exp:free | free
|
||||
google/gemini-exp-1114:free | free
|
||||
google/gemini-exp-1121:free | free
|
||||
google/gemini-exp-1206:free | free
|
||||
google/gemini-flash-1.5 | paid
|
||||
google/gemini-flash-1.5-8b | paid
|
||||
google/gemini-pro-1.5 | paid
|
||||
google/gemini-2.0-flash-001 | paid
|
||||
google/gemini-flash-1.5-8b-exp | paid
|
||||
google/gemini-flash-1.5-exp | paid
|
||||
google/gemini-2.0-flash-exp:free | free
|
||||
google/gemini-2.0-flash-lite-001 | paid
|
||||
google/gemini-2.5-flash | paid
|
||||
google/gemini-2.5-flash-lite-preview-06-17 | paid
|
||||
google/gemini-2.5-flash-preview | paid
|
||||
google/gemini-2.5-flash-preview:thinking | paid
|
||||
google/gemini-2.5-flash-preview-05-20 | paid
|
||||
google/gemini-2.5-flash-preview-05-20:thinking | paid
|
||||
google/gemini-2.5-pro | paid
|
||||
google/gemini-2.5-pro-exp-03-25 | paid
|
||||
google/gemini-2.5-pro-preview-05-06 | paid
|
||||
google/gemini-2.5-pro-preview | paid
|
||||
google/gemini-pro | paid
|
||||
google/gemini-pro-1.5 | paid
|
||||
google/gemini-pro-1.5-exp | paid
|
||||
google/gemini-pro-vision | paid
|
||||
google/gemma-2-27b-it | paid
|
||||
google/gemma-2-9b-it | paid
|
||||
google/gemma-2-9b-it:free | free
|
||||
google/gemma-3-12b-it | paid
|
||||
google/gemma-3-12b-it:free | free
|
||||
google/gemma-3-27b-it | paid
|
||||
google/gemma-3-27b-it:free | free
|
||||
google/gemma-3-4b-it | paid
|
||||
google/gemma-3-4b-it:free | free
|
||||
google/gemma-3n-e4b-it | paid
|
||||
google/gemma-3n-e4b-it:free | free
|
||||
inception/mercury | paid
|
||||
inception/mercury-coder-small-beta | paid
|
||||
google/learnlm-1.5-pro-experimental:free | free
|
||||
google/palm-2-chat-bison | paid
|
||||
google/palm-2-chat-bison-32k | paid
|
||||
google/palm-2-codechat-bison | paid
|
||||
google/palm-2-codechat-bison-32k | paid
|
||||
huggingfaceh4/zephyr-7b-beta:free | free
|
||||
infermatic/mn-inferor-12b | paid
|
||||
inflatebot/mn-mag-mell-r1 | paid
|
||||
inflection/inflection-3-pi | paid
|
||||
inflection/inflection-3-productivity | paid
|
||||
moonshotai/kimi-dev-72b:free | free
|
||||
liquid/lfm-3b | paid
|
||||
liquid/lfm-40b | paid
|
||||
liquid/lfm-7b | paid
|
||||
meta-llama/llama-guard-3-8b | paid
|
||||
lizpreciatior/lzlv-70b-fp16-hf | paid
|
||||
alpindale/magnum-72b | paid
|
||||
anthracite-org/magnum-v2-72b | paid
|
||||
anthracite-org/magnum-v4-72b | paid
|
||||
mancer/weaver | paid
|
||||
meta-llama/llama-2-13b-chat | paid
|
||||
meta-llama/llama-3-70b-instruct | paid
|
||||
meta-llama/llama-3-70b-instruct:nitro | paid
|
||||
meta-llama/llama-3-8b-instruct | paid
|
||||
meta-llama/llama-3-8b-instruct:extended | paid
|
||||
meta-llama/llama-3-8b-instruct:free | free
|
||||
meta-llama/llama-3-8b-instruct:nitro | paid
|
||||
meta-llama/llama-3.1-405b | paid
|
||||
meta-llama/llama-3.1-405b-instruct | paid
|
||||
meta-llama/llama-3.1-405b-instruct:free | free
|
||||
meta-llama/llama-3.1-405b-instruct:nitro | paid
|
||||
meta-llama/llama-3.1-70b-instruct | paid
|
||||
meta-llama/llama-3.1-70b-instruct:free | free
|
||||
meta-llama/llama-3.1-70b-instruct:nitro | paid
|
||||
meta-llama/llama-3.1-8b-instruct | paid
|
||||
meta-llama/llama-3.1-8b-instruct:free | free
|
||||
meta-llama/llama-3.2-11b-vision-instruct | paid
|
||||
@ -168,192 +131,128 @@ export interface IKBotOptions {
|
||||
meta-llama/llama-3.2-1b-instruct | paid
|
||||
meta-llama/llama-3.2-1b-instruct:free | free
|
||||
meta-llama/llama-3.2-3b-instruct | paid
|
||||
meta-llama/llama-3.2-3b-instruct:free | free
|
||||
meta-llama/llama-3.2-90b-vision-instruct | paid
|
||||
meta-llama/llama-3.2-90b-vision-instruct:free | free
|
||||
meta-llama/llama-3.3-70b-instruct | paid
|
||||
meta-llama/llama-3.3-70b-instruct:free | free
|
||||
meta-llama/llama-4-maverick | paid
|
||||
meta-llama/llama-4-maverick:free | free
|
||||
meta-llama/llama-4-scout | paid
|
||||
meta-llama/llama-4-scout:free | free
|
||||
meta-llama/llama-guard-4-12b | paid
|
||||
meta-llama/llama-guard-2-8b | paid
|
||||
microsoft/mai-ds-r1:free | free
|
||||
microsoft/phi-4 | paid
|
||||
microsoft/phi-4-multimodal-instruct | paid
|
||||
microsoft/phi-4-reasoning-plus | paid
|
||||
microsoft/phi-3-medium-128k-instruct | paid
|
||||
microsoft/phi-3-medium-128k-instruct:free | free
|
||||
microsoft/phi-3-mini-128k-instruct | paid
|
||||
microsoft/phi-3-mini-128k-instruct:free | free
|
||||
microsoft/phi-3.5-mini-128k-instruct | paid
|
||||
sophosympatheia/midnight-rose-70b | paid
|
||||
minimax/minimax-m1 | paid
|
||||
minimax/minimax-m1:extended | paid
|
||||
minimax/minimax-01 | paid
|
||||
mistralai/mistral-large | paid
|
||||
mistralai/mistral-large-2407 | paid
|
||||
mistralai/mistral-large-2411 | paid
|
||||
mistralai/mistral-medium | paid
|
||||
nothingiisreal/mn-celeste-12b | paid
|
||||
mistralai/mistral-small | paid
|
||||
mistralai/mistral-tiny | paid
|
||||
mistralai/codestral-2501 | paid
|
||||
mistralai/devstral-small | paid
|
||||
mistralai/devstral-small:free | free
|
||||
mistralai/magistral-medium-2506 | paid
|
||||
mistralai/magistral-medium-2506:thinking | paid
|
||||
mistralai/magistral-small-2506 | paid
|
||||
mistralai/codestral-mamba | paid
|
||||
mistralai/ministral-3b | paid
|
||||
mistralai/ministral-8b | paid
|
||||
mistralai/mistral-7b-instruct | paid
|
||||
mistralai/mistral-7b-instruct:free | free
|
||||
mistralai/mistral-7b-instruct:nitro | paid
|
||||
mistralai/mistral-7b-instruct-v0.1 | paid
|
||||
mistralai/mistral-7b-instruct-v0.2 | paid
|
||||
mistralai/mistral-7b-instruct-v0.3 | paid
|
||||
mistralai/mistral-medium-3 | paid
|
||||
mistralai/mistral-nemo | paid
|
||||
mistralai/mistral-nemo:free | free
|
||||
mistralai/mistral-small-24b-instruct-2501 | paid
|
||||
mistralai/mistral-small-24b-instruct-2501:free | free
|
||||
mistralai/mistral-small-3.1-24b-instruct | paid
|
||||
mistralai/mistral-small-3.1-24b-instruct:free | free
|
||||
mistralai/mistral-small-3.2-24b-instruct | paid
|
||||
mistralai/mistral-small-3.2-24b-instruct:free | free
|
||||
mistralai/mixtral-8x22b-instruct | paid
|
||||
mistralai/mixtral-8x7b | paid
|
||||
mistralai/mixtral-8x7b-instruct | paid
|
||||
mistralai/mixtral-8x7b-instruct:nitro | paid
|
||||
mistralai/pixtral-12b | paid
|
||||
mistralai/pixtral-large-2411 | paid
|
||||
mistralai/mistral-saba | paid
|
||||
moonshotai/kimi-vl-a3b-thinking:free | free
|
||||
morph/morph-v2 | paid
|
||||
gryphe/mythomax-l2-13b | paid
|
||||
gryphe/mythomax-l2-13b:extended | paid
|
||||
gryphe/mythomax-l2-13b:free | free
|
||||
gryphe/mythomax-l2-13b:nitro | paid
|
||||
neversleep/llama-3-lumimaid-70b | paid
|
||||
neversleep/llama-3-lumimaid-8b | paid
|
||||
neversleep/llama-3-lumimaid-8b:extended | paid
|
||||
neversleep/llama-3.1-lumimaid-70b | paid
|
||||
neversleep/llama-3.1-lumimaid-8b | paid
|
||||
neversleep/noromaid-20b | paid
|
||||
nousresearch/deephermes-3-llama-3-8b-preview:free | free
|
||||
nousresearch/nous-hermes-llama2-13b | paid
|
||||
nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid
|
||||
nousresearch/hermes-3-llama-3.1-405b | paid
|
||||
nousresearch/hermes-3-llama-3.1-70b | paid
|
||||
nousresearch/hermes-2-pro-llama-3-8b | paid
|
||||
nvidia/llama-3.1-nemotron-70b-instruct | paid
|
||||
nvidia/llama-3.1-nemotron-ultra-253b-v1 | paid
|
||||
nvidia/llama-3.1-nemotron-ultra-253b-v1:free | free
|
||||
nvidia/llama-3.3-nemotron-super-49b-v1 | paid
|
||||
nvidia/llama-3.3-nemotron-super-49b-v1:free | free
|
||||
openai/chatgpt-4o-latest | paid
|
||||
openai/codex-mini | paid
|
||||
openai/gpt-3.5-turbo | paid
|
||||
openai/gpt-3.5-turbo-0613 | paid
|
||||
openai/gpt-3.5-turbo-16k | paid
|
||||
openai/gpt-3.5-turbo-0125 | paid
|
||||
openai/gpt-3.5-turbo-1106 | paid
|
||||
openai/gpt-3.5-turbo-instruct | paid
|
||||
openai/gpt-4 | paid
|
||||
openai/gpt-4-0314 | paid
|
||||
openai/gpt-4-32k | paid
|
||||
openai/gpt-4-32k-0314 | paid
|
||||
openai/gpt-4-turbo | paid
|
||||
openai/gpt-4-1106-preview | paid
|
||||
openai/gpt-4-turbo-preview | paid
|
||||
openai/gpt-4.1 | paid
|
||||
openai/gpt-4.1-mini | paid
|
||||
openai/gpt-4.1-nano | paid
|
||||
openai/gpt-4.5-preview | paid
|
||||
openai/gpt-4o | paid
|
||||
openai/gpt-4o-2024-05-13 | paid
|
||||
openai/gpt-4o-2024-08-06 | paid
|
||||
openai/gpt-4o-2024-11-20 | paid
|
||||
openai/gpt-4o:extended | paid
|
||||
openai/gpt-4o-search-preview | paid
|
||||
openai/gpt-4o-mini | paid
|
||||
openai/gpt-4o-mini-2024-07-18 | paid
|
||||
openai/gpt-4o-mini-search-preview | paid
|
||||
openai/o1 | paid
|
||||
openai/o1-mini | paid
|
||||
openai/o1-mini-2024-09-12 | paid
|
||||
openai/o1-preview | paid
|
||||
openai/o1-preview-2024-09-12 | paid
|
||||
openai/o1-pro | paid
|
||||
openai/o3 | paid
|
||||
openai/o3-mini | paid
|
||||
openai/o3-mini-high | paid
|
||||
openai/o3-pro | paid
|
||||
openai/o4-mini | paid
|
||||
openai/o4-mini-high | paid
|
||||
opengvlab/internvl3-14b | paid
|
||||
opengvlab/internvl3-2b | paid
|
||||
all-hands/openhands-lm-32b-v0.1 | paid
|
||||
openchat/openchat-7b | paid
|
||||
openchat/openchat-7b:free | free
|
||||
teknium/openhermes-2.5-mistral-7b | paid
|
||||
perplexity/llama-3.1-sonar-huge-128k-online | paid
|
||||
perplexity/llama-3.1-sonar-large-128k-chat | paid
|
||||
perplexity/llama-3.1-sonar-large-128k-online | paid
|
||||
perplexity/llama-3.1-sonar-small-128k-chat | paid
|
||||
perplexity/llama-3.1-sonar-small-128k-online | paid
|
||||
perplexity/r1-1776 | paid
|
||||
perplexity/sonar | paid
|
||||
perplexity/sonar-deep-research | paid
|
||||
perplexity/sonar-pro | paid
|
||||
perplexity/sonar-reasoning | paid
|
||||
perplexity/sonar-reasoning-pro | paid
|
||||
perplexity/llama-3-sonar-large-32k-chat | paid
|
||||
perplexity/llama-3-sonar-large-32k-online | paid
|
||||
perplexity/llama-3-sonar-small-32k-chat | paid
|
||||
pygmalionai/mythalion-13b | paid
|
||||
qwen/qwen-2-72b-instruct | paid
|
||||
qwen/qwen-vl-max | paid
|
||||
qwen/qwen-vl-plus | paid
|
||||
qwen/qwen-max | paid
|
||||
qwen/qwen-plus | paid
|
||||
qwen/qwen-turbo | paid
|
||||
qwen/qwen2.5-vl-32b-instruct | paid
|
||||
qwen/qwen2.5-vl-32b-instruct:free | free
|
||||
qwen/qwen2.5-vl-72b-instruct | paid
|
||||
qwen/qwen2.5-vl-72b-instruct:free | free
|
||||
qwen/qwen-2.5-vl-7b-instruct | paid
|
||||
qwen/qwen3-14b | paid
|
||||
qwen/qwen3-14b:free | free
|
||||
qwen/qwen3-235b-a22b | paid
|
||||
qwen/qwen3-235b-a22b:free | free
|
||||
qwen/qwen3-30b-a3b | paid
|
||||
qwen/qwen3-30b-a3b:free | free
|
||||
qwen/qwen3-32b | paid
|
||||
qwen/qwen3-32b:free | free
|
||||
qwen/qwen3-8b | paid
|
||||
qwen/qwen3-8b:free | free
|
||||
qwen/qwq-32b | paid
|
||||
qwen/qwq-32b:free | free
|
||||
qwen/qwen-2-7b-instruct | paid
|
||||
qwen/qwen-2-7b-instruct:free | free
|
||||
qwen/qvq-72b-preview | paid
|
||||
qwen/qwq-32b-preview | paid
|
||||
qwen/qwen-2-vl-72b-instruct | paid
|
||||
qwen/qwen-2-vl-7b-instruct | paid
|
||||
qwen/qwen-2.5-72b-instruct | paid
|
||||
qwen/qwen-2.5-72b-instruct:free | free
|
||||
qwen/qwen-2.5-7b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct | paid
|
||||
qwen/qwen-2.5-coder-32b-instruct:free | free
|
||||
featherless/qwerky-72b:free | free
|
||||
rekaai/reka-flash-3:free | free
|
||||
undi95/remm-slerp-l2-13b | paid
|
||||
undi95/remm-slerp-l2-13b:extended | paid
|
||||
thedrummer/rocinante-12b | paid
|
||||
sao10k/l3-lunaris-8b | paid
|
||||
sao10k/l3-euryale-70b | paid
|
||||
sao10k/l3.1-euryale-70b | paid
|
||||
sao10k/l3.3-euryale-70b | paid
|
||||
sarvamai/sarvam-m:free | free
|
||||
sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b | paid
|
||||
shisa-ai/shisa-v2-llama3.3-70b:free | free
|
||||
raifle/sorcererlm-8x22b | paid
|
||||
thedrummer/anubis-pro-105b-v1 | paid
|
||||
thedrummer/rocinante-12b | paid
|
||||
thedrummer/skyfall-36b-v2 | paid
|
||||
thedrummer/unslopnemo-12b | paid
|
||||
thedrummer/valkyrie-49b-v1 | paid
|
||||
thudm/glm-4-32b | paid
|
||||
thudm/glm-4-32b:free | free
|
||||
thudm/glm-z1-32b | paid
|
||||
thudm/glm-z1-32b:free | free
|
||||
thudm/glm-z1-rumination-32b | paid
|
||||
tngtech/deepseek-r1t-chimera:free | free
|
||||
undi95/toppy-m-7b | paid
|
||||
scb10x/llama3.1-typhoon2-70b-instruct | paid
|
||||
undi95/toppy-m-7b:free | free
|
||||
undi95/toppy-m-7b:nitro | paid
|
||||
thedrummer/unslopnemo-12b | paid
|
||||
microsoft/wizardlm-2-7b | paid
|
||||
microsoft/wizardlm-2-8x22b | paid
|
||||
x-ai/grok-2-1212 | paid
|
||||
x-ai/grok-2-vision-1212 | paid
|
||||
x-ai/grok-3 | paid
|
||||
x-ai/grok-3-beta | paid
|
||||
x-ai/grok-3-mini | paid
|
||||
x-ai/grok-3-mini-beta | paid
|
||||
x-ai/grok-beta | paid
|
||||
x-ai/grok-vision-beta | paid
|
||||
xwin-lm/xwin-lm-70b | paid
|
||||
OpenAI models:
babbage-002
|
||||
chatgpt-4o-latest
|
||||
codex-mini-latest
|
||||
dall-e-2
|
||||
dall-e-3
|
||||
davinci-002
|
||||
@ -367,17 +266,11 @@ export interface IKBotOptions {
|
||||
gpt-4-0125-preview
|
||||
gpt-4-0613
|
||||
gpt-4-1106-preview
|
||||
gpt-4-1106-vision-preview
|
||||
gpt-4-turbo
|
||||
gpt-4-turbo-2024-04-09
|
||||
gpt-4-turbo-preview
|
||||
gpt-4.1
|
||||
gpt-4.1-2025-04-14
|
||||
gpt-4.1-mini
|
||||
gpt-4.1-mini-2025-04-14
|
||||
gpt-4.1-nano
|
||||
gpt-4.1-nano-2025-04-14
|
||||
gpt-4.5-preview
|
||||
gpt-4.5-preview-2025-02-27
|
||||
gpt-4-vision-preview
|
||||
gpt-4o
|
||||
gpt-4o-2024-05-13
|
||||
gpt-4o-2024-08-06
|
||||
@ -385,39 +278,19 @@ export interface IKBotOptions {
|
||||
gpt-4o-audio-preview
|
||||
gpt-4o-audio-preview-2024-10-01
|
||||
gpt-4o-audio-preview-2024-12-17
|
||||
gpt-4o-audio-preview-2025-06-03
|
||||
gpt-4o-mini
|
||||
gpt-4o-mini-2024-07-18
|
||||
gpt-4o-mini-audio-preview
|
||||
gpt-4o-mini-audio-preview-2024-12-17
|
||||
gpt-4o-mini-realtime-preview
|
||||
gpt-4o-mini-realtime-preview-2024-12-17
|
||||
gpt-4o-mini-search-preview
|
||||
gpt-4o-mini-search-preview-2025-03-11
|
||||
gpt-4o-mini-transcribe
|
||||
gpt-4o-mini-tts
|
||||
gpt-4o-realtime-preview
|
||||
gpt-4o-realtime-preview-2024-10-01
|
||||
gpt-4o-realtime-preview-2024-12-17
|
||||
gpt-4o-realtime-preview-2025-06-03
|
||||
gpt-4o-search-preview
|
||||
gpt-4o-search-preview-2025-03-11
|
||||
gpt-4o-transcribe
|
||||
gpt-image-1
|
||||
o1
|
||||
o1-2024-12-17
|
||||
o1-mini
|
||||
o1-mini-2024-09-12
|
||||
o1-preview
|
||||
o1-preview-2024-09-12
|
||||
o1-pro
|
||||
o1-pro-2025-03-19
|
||||
o3-mini
|
||||
o3-mini-2025-01-31
|
||||
o4-mini
|
||||
o4-mini-2025-04-16
|
||||
o4-mini-deep-research
|
||||
o4-mini-deep-research-2025-06-26
|
||||
omni-moderation-2024-09-26
|
||||
omni-moderation-latest
|
||||
text-embedding-3-large
|
||||
@@ -444,6 +317,7 @@ export interface IKBotOptions {
completion, tools, assistant.
completion: no support for tools, please use --dst parameter to save the output.
tools: allows for tools to be used, eg 'save to ./output.md'. Not all models support this mode.
responses: allows for responses to be used, eg 'save to ./output.md'. Not all models support this mode.
assistant: : allows documents (PDF, DOCX, ...) to be added but dont support tools. Use --dst to save the output. Supported files :
custom: custom mode
*/
@@ -462,6 +336,8 @@ export interface IKBotOptions {
preferences?: string;
/** Logging directory */
logs?: string;
/** Enable streaming (verbose LLM output) */
stream?: boolean;
/** Environment (in profile) */
env?: string;
variables?: {
@@ -40,6 +40,10 @@
"cons": {
"type": "string",
"description": "Disadvantages of using this display."
},
"brand": {
"type": "string",
"description": "Brand of the display."
}
},
"required": [
@@ -2,7 +2,7 @@

# - --model=o3-deep-research needs verification : https://platform.openai.com/settings/organization/general

kbot-d "Create a comprehensive list of OLED displays, with buttons - to be used with a ESP-32. The output should be a JSON object that follows the provided schema. Elaborate on the features, and the pros and cons of each." \
kbot-d "Create a comprehensive list of OLED displays (european market), at least 20, with buttons - to be used with a ESP-32. The output should be a JSON object that follows the provided schema. Elaborate on the features, and the pros and cons of each." \
--router=openai \
--model=gpt-4.1 \
--model_rs_1=o4-mini-deep-research \
@@ -1,49 +0,0 @@
{
"displays": [
{
"model_name": "TouchEye Dual Round OLED Display",
"link": "https://www.diyelectronics.us/2025/02/toucheye-compact-open-source-dual-oled.html",
"price": 49.0,
"type": "I2C",
"features": "Dual 1.28” round touch displays with 240x240 resolution, ESP32-S3 microcontroller, Wi-Fi and Bluetooth connectivity, microSD card slot, two programmable buttons, USB Type-C interface, battery connector with charging management.",
"pros": "Compact design with dual touch displays, wireless connectivity, expandable storage, and user-friendly interface.",
"cons": "Limited to round display format, higher price point compared to single-display modules."
},
{
"model_name": "ESP32-CAM with Capacitive Touch Buttons and OLED",
"link": "https://robotzero.one/esp32-cam-oled-capacitive-touch-buttons/",
"price": 30.0,
"type": "I2C",
"features": "0.96” OLED display, ESP32-CAM module with camera, three capacitive touch buttons, microSD card slot, USB Type-C interface, battery-powered operation.",
"pros": "Integrated camera and display, touch button interface, portable design with battery operation.",
"cons": "Smaller display size, limited to three touch buttons, requires assembly and 3D printing for enclosure."
},
{
"model_name": "FireBeetle Covers-OLED12864 Display",
"link": "https://community.dfrobot.com/makelog-313920.html",
"price": 20.0,
"type": "I2C",
"features": "1.3” OLED display with 128x64 resolution, two buttons, 5-way joystick, 3-axis accelerometer, compatible with FireBeetle ESP8266 board.",
"pros": "Comprehensive input options with joystick and buttons, integrated accelerometer for motion sensing, compact and stackable design.",
"cons": "Requires FireBeetle board for compatibility, limited to specific ecosystem, smaller display size."
},
{
"model_name": "ESP32-S3 SuperMini with SSD1306 OLED and Button",
"link": "https://www.espboards.dev/blog/ssd1306-esp32-s3-super-mini-setup/",
"price": 15.0,
"type": "I2C",
"features": "0.96” OLED display with 128x64 resolution, single push button, ESP32-S3 microcontroller, USB Type-C interface.",
"pros": "Minimalist design, easy to integrate, cost-effective solution for simple interfaces.",
"cons": "Limited input options with only one button, smaller display size, basic functionality."
},
{
"model_name": "Norvi IIOT-AE01-R with Built-in OLED and Push Buttons",
"link": "https://www.instructables.com/Working-With-Built-in-Display-and-Push-Buttons-of-/",
"price": 60.0,
"type": "I2C",
"features": "0.96” OLED SSD1306 display with 128x64 resolution, three push buttons connected via analog input, ESP32 microcontroller, industrial-grade design.",
"pros": "Industrial-grade build quality, integrated display and buttons, suitable for automation applications.",
"cons": "Higher price point, limited to three buttons, analog input method for buttons may require calibration."
}
]
}
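For reference, a hedged TypeScript shape for these removed sample entries, combining the fields visible above with the brand property added to the schema earlier in this commit; this interface is illustrative and not part of the repository.

// Illustrative only; field names taken from the schema diff and the removed sample data.
interface DisplayEntry {
    model_name: string
    link: string
    price: number   // as listed in the source data
    type: string    // e.g. "I2C"
    features: string
    pros: string
    cons: string
    brand?: string  // added to the schema in this commit; shown optional since the required list is not visible in the diff
}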