maintenance love:)

This commit is contained in:
babayaga 2025-02-19 08:04:38 +01:00
parent 3c4d8bc122
commit 79e4a1574b
14 changed files with 162 additions and 104 deletions

View File

@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MAX_FILE_SIZE = exports.EXCLUDE_GLOB = exports.MODULE_NAME = void 0;
exports.LOGGING_DIRECTORY = exports.PREFERENCES_FILE_NAME = exports.API_PREFIX = exports.MAX_FILE_SIZE = exports.EXCLUDE_GLOB = exports.MODULE_NAME = void 0;
exports.MODULE_NAME = 'kbot';
exports.EXCLUDE_GLOB = [
"**/node_modules/**",
@ -8,8 +8,11 @@ exports.EXCLUDE_GLOB = [
"**/build/**",
"**/coverage/**",
"*.log",
".kbot",
`.${exports.MODULE_NAME}`,
".git"
];
exports.MAX_FILE_SIZE = 1024 * 1024 * 2;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY29uc3RhbnRzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsic3JjL2NvbnN0YW50cy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7QUFBYSxRQUFBLFdBQVcsR0FBRyxNQUFNLENBQUE7QUFDcEIsUUFBQSxZQUFZLEdBQUc7SUFDeEIsb0JBQW9CO0lBQ3BCLFlBQVk7SUFDWixhQUFhO0lBQ2IsZ0JBQWdCO0lBQ2hCLE9BQU87SUFDUCxPQUFPO0lBQ1AsTUFBTTtDQUNULENBQUE7QUFDWSxRQUFBLGFBQWEsR0FBRyxJQUFJLEdBQUcsSUFBSSxHQUFHLENBQUMsQ0FBQSJ9
exports.API_PREFIX = 'osr';
exports.PREFERENCES_FILE_NAME = 'preferences.md';
exports.LOGGING_DIRECTORY = `.${exports.MODULE_NAME}`;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY29uc3RhbnRzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsic3JjL2NvbnN0YW50cy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7QUFBYSxRQUFBLFdBQVcsR0FBRyxNQUFNLENBQUE7QUFDcEIsUUFBQSxZQUFZLEdBQUc7SUFDeEIsb0JBQW9CO0lBQ3BCLFlBQVk7SUFDWixhQUFhO0lBQ2IsZ0JBQWdCO0lBQ2hCLE9BQU87SUFDUCxJQUFJLG1CQUFXLEVBQUU7SUFDakIsTUFBTTtDQUNULENBQUE7QUFDWSxRQUFBLGFBQWEsR0FBRyxJQUFJLEdBQUcsSUFBSSxHQUFHLENBQUMsQ0FBQTtBQUMvQixRQUFBLFVBQVUsR0FBRyxLQUFLLENBQUE7QUFDbEIsUUFBQSxxQkFBcUIsR0FBRyxnQkFBZ0IsQ0FBQTtBQUN4QyxRQUFBLGlCQUFpQixHQUFHLElBQUksbUJBQVcsRUFBRSxDQUFBIn0=

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,5 +1,5 @@
{
"timestamp": 1739545527029,
"timestamp": 1739948673840,
"models": [
{
"id": "gpt-4o-mini-audio-preview-2024-12-17",
@ -13,6 +13,12 @@
"created": 1698785189,
"owned_by": "system"
},
{
"id": "gpt-4-turbo-2024-04-09",
"object": "model",
"created": 1712601677,
"owned_by": "system"
},
{
"id": "dall-e-2",
"object": "model",
@ -79,12 +85,6 @@
"created": 1712361441,
"owned_by": "system"
},
{
"id": "gpt-4o",
"object": "model",
"created": 1715367049,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-audio-preview",
"object": "model",
@ -92,9 +92,9 @@
"owned_by": "system"
},
{
"id": "gpt-4o-mini-2024-07-18",
"id": "gpt-4o-2024-11-20",
"object": "model",
"created": 1721172717,
"created": 1739331543,
"owned_by": "system"
},
{
@ -109,18 +109,6 @@
"created": 1692634615,
"owned_by": "system"
},
{
"id": "omni-moderation-latest",
"object": "model",
"created": 1731689265,
"owned_by": "system"
},
{
"id": "omni-moderation-2024-09-26",
"object": "model",
"created": 1732734466,
"owned_by": "system"
},
{
"id": "tts-1-hd-1106",
"object": "model",
@ -133,48 +121,12 @@
"created": 1734034239,
"owned_by": "system"
},
{
"id": "gpt-4o-mini",
"object": "model",
"created": 1721172741,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-08-06",
"object": "model",
"created": 1722814719,
"owned_by": "system"
},
{
"id": "tts-1-hd",
"object": "model",
"created": 1699046015,
"owned_by": "system"
},
{
"id": "gpt-4-0125-preview",
"object": "model",
"created": 1706037612,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-11-20",
"object": "model",
"created": 1739331543,
"owned_by": "system"
},
{
"id": "gpt-4-turbo-preview",
"object": "model",
"created": 1706037777,
"owned_by": "system"
},
{
"id": "chatgpt-4o-latest",
"object": "model",
"created": 1723515131,
"owned_by": "system"
},
{
"id": "text-embedding-3-large",
"object": "model",
@ -182,9 +134,15 @@
"owned_by": "system"
},
{
"id": "gpt-4-turbo-2024-04-09",
"id": "gpt-4o-mini-2024-07-18",
"object": "model",
"created": 1712601677,
"created": 1721172717,
"owned_by": "system"
},
{
"id": "gpt-4o-mini",
"object": "model",
"created": 1721172741,
"owned_by": "system"
},
{
@ -193,12 +151,24 @@
"created": 1681940951,
"owned_by": "openai-internal"
},
{
"id": "gpt-4o",
"object": "model",
"created": 1715367049,
"owned_by": "system"
},
{
"id": "tts-1-1106",
"object": "model",
"created": 1699053241,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-08-06",
"object": "model",
"created": 1722814719,
"owned_by": "system"
},
{
"id": "davinci-002",
"object": "model",
@ -211,6 +181,12 @@
"created": 1698959748,
"owned_by": "system"
},
{
"id": "omni-moderation-2024-09-26",
"object": "model",
"created": 1732734466,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-instruct",
"object": "model",
@ -265,6 +241,12 @@
"created": 1687882411,
"owned_by": "openai"
},
{
"id": "text-embedding-ada-002",
"object": "model",
"created": 1671217299,
"owned_by": "openai-internal"
},
{
"id": "gpt-4-1106-preview",
"object": "model",
@ -272,10 +254,28 @@
"owned_by": "system"
},
{
"id": "text-embedding-ada-002",
"id": "omni-moderation-latest",
"object": "model",
"created": 1671217299,
"owned_by": "openai-internal"
"created": 1731689265,
"owned_by": "system"
},
{
"id": "gpt-4-0613",
"object": "model",
"created": 1686588896,
"owned_by": "openai"
},
{
"id": "gpt-4-0125-preview",
"object": "model",
"created": 1706037612,
"owned_by": "system"
},
{
"id": "gpt-4-turbo-preview",
"object": "model",
"created": 1706037777,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-05-13",
@ -284,10 +284,10 @@
"owned_by": "system"
},
{
"id": "gpt-4-0613",
"id": "chatgpt-4o-latest",
"object": "model",
"created": 1686588896,
"owned_by": "openai"
"created": 1723515131,
"owned_by": "system"
}
]
}

View File

@ -1,9 +1,33 @@
{
"timestamp": 1739545527255,
"timestamp": 1739948674415,
"models": [
{
"id": "mistralai/mistral-saba",
"name": "Mistral: Saba",
"created": 1739803239,
"description": "Mistral Saba is a 24B-parameter language model specifically designed for the Middle East and South Asia, delivering accurate and contextually relevant responses while maintaining efficient performance. Trained on curated regional datasets, it supports multiple Indian-origin languages—including Tamil and Malayalam—alongside Arabic. This makes it a versatile option for a range of regional and multilingual applications. Read more at the blog post [here](https://mistral.ai/en/news/mistral-saba)",
"context_length": 32000,
"architecture": {
"modality": "text->text",
"tokenizer": "Mistral",
"instruct_type": null
},
"pricing": {
"prompt": "0.0000002",
"completion": "0.0000006",
"image": "0",
"request": "0"
},
"top_provider": {
"context_length": 32000,
"max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null
},
{
"id": "cognitivecomputations/dolphin3.0-r1-mistral-24b:free",
"name": "Dolphin3.0 R1 Mistral 24b (free)",
"name": "Dolphin3.0 R1 Mistral 24B (free)",
"created": 1739462498,
"description": "Dolphin 3.0 R1 is the next generation of the Dolphin series of instruct-tuned models. Designed to be the ultimate general purpose local model, enabling coding, math, agentic, function calling, and general use cases.\n\nThe R1 version has been trained for 3 epochs to reason using 800k reasoning traces from the Dolphin-R1 dataset.\n\nDolphin aims to be a general purpose reasoning instruct model, similar to the models behind ChatGPT, Claude, Gemini.\n\nPart of the [Dolphin 3.0 Collection](https://huggingface.co/collections/cognitivecomputations/dolphin-30-677ab47f73d7ff66743979a3) Curated and trained by [Eric Hartford](https://huggingface.co/ehartford), [Ben Gitter](https://huggingface.co/bigstorm), [BlouseJury](https://huggingface.co/BlouseJury) and [Cognitive Computations](https://huggingface.co/cognitivecomputations)",
"context_length": 32768,
@ -27,7 +51,7 @@
},
{
"id": "cognitivecomputations/dolphin3.0-mistral-24b:free",
"name": "Dolphin3.0 Mistral 24b (free)",
"name": "Dolphin3.0 Mistral 24B (free)",
"created": 1739462019,
"description": "Dolphin 3.0 is the next generation of the Dolphin series of instruct-tuned models. Designed to be the ultimate general purpose local model, enabling coding, math, agentic, function calling, and general use cases.\n\nDolphin aims to be a general purpose instruct model, similar to the models behind ChatGPT, Claude, Gemini. \n\nPart of the [Dolphin 3.0 Collection](https://huggingface.co/collections/cognitivecomputations/dolphin-30-677ab47f73d7ff66743979a3) Curated and trained by [Eric Hartford](https://huggingface.co/ehartford), [Ben Gitter](https://huggingface.co/bigstorm), [BlouseJury](https://huggingface.co/BlouseJury) and [Cognitive Computations](https://huggingface.co/cognitivecomputations)",
"context_length": 32768,
@ -51,7 +75,7 @@
},
{
"id": "meta-llama/llama-guard-3-8b",
"name": "Llama Guard 3 8b",
"name": "Llama Guard 3 8B",
"created": 1739401318,
"description": "Llama Guard 3 is a Llama-3.1-8B pretrained model, fine-tuned for content safety classification. Similar to previous versions, it can be used to classify content in both LLM inputs (prompt classification) and in LLM responses (response classification). It acts as an LLM it generates text in its output that indicates whether a given prompt or response is safe or unsafe, and if unsafe, it also lists the content categories violated.\n\nLlama Guard 3 was aligned to safeguard against the MLCommons standardized hazards taxonomy and designed to support Llama 3.1 capabilities. Specifically, it provides content moderation in 8 languages, and was optimized to support safety and security for search and code interpreter tool calls.\n",
"context_length": 16384,
@ -99,7 +123,7 @@
},
{
"id": "allenai/llama-3.1-tulu-3-405b",
"name": "Llama 3.1 Tulu 3 405b",
"name": "Llama 3.1 Tulu 3 405B",
"created": 1739053421,
"description": "Tülu 3 405B is the largest model in the Tülu 3 family, applying fully open post-training recipes at a 405B parameter scale. Built on the Llama 3.1 405B base, it leverages Reinforcement Learning with Verifiable Rewards (RLVR) to enhance instruction following, MATH, GSM8K, and IFEval performance. As part of Tülu 3s fully open-source approach, it offers state-of-the-art capabilities while surpassing prior open-weight models like Llama 3.1 405B Instruct and Nous Hermes 3 405B on multiple benchmarks. To read more, [click here.](https://allenai.org/blog/tulu-3-405B)",
"context_length": 16000,
@ -1011,7 +1035,7 @@
},
{
"id": "eva-unit-01/eva-llama-3.33-70b",
"name": "EVA Llama 3.33 70b",
"name": "EVA Llama 3.33 70B",
"created": 1734377303,
"description": "EVA Llama 3.33 70b is a roleplay and storywriting specialist model. It is a full-parameter finetune of [Llama-3.3-70B-Instruct](https://openrouter.ai/meta-llama/llama-3.3-70b-instruct) on mixture of synthetic and natural data.\n\nIt uses Celeste 70B 0.1 data mixture, greatly expanding it to improve versatility, creativity and \"flavor\" of the resulting model\n\nThis model was built with Llama by Meta.\n",
"context_length": 16384,
@ -1749,7 +1773,7 @@
"top_provider": {
"context_length": 200000,
"max_completion_tokens": 8192,
"is_moderated": true
"is_moderated": false
},
"per_request_limits": null
},

View File

@ -36,9 +36,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.preferences = exports.prompt = void 0;
const input_1 = require("./utils/input");
const read_1 = require("@plastichub/fs/read");
const exists_1 = require("@plastichub/fs/exists");
const osr_commons_1 = require("@plastichub/osr-commons");
const path = __importStar(require("path"));
const env_1 = require("./utils/env");
const constants_1 = require("./constants");
const prompt = async (opts) => {
const input = await (0, input_1.resolveQuery)(opts);
return {
@ -48,7 +50,10 @@ const prompt = async (opts) => {
};
exports.prompt = prompt;
const preferences = async (opts) => {
const preferencesPath = path.resolve((0, osr_commons_1.resolve)(opts.preferences, false, (0, env_1.env_vars)()));
let preferencesPath = path.resolve((0, osr_commons_1.resolve)(opts.preferences, false, (0, env_1.env_vars)()));
if (!(0, exists_1.sync)(preferencesPath)) {
preferencesPath = path.resolve(path.join(process.cwd(), constants_1.PREFERENCES_FILE_NAME));
}
const preferences = (0, read_1.sync)(preferencesPath, 'string');
return {
role: "user",
@ -56,4 +61,4 @@ const preferences = async (opts) => {
};
};
exports.preferences = preferences;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvbXB0LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsic3JjL3Byb21wdC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7QUFDQSx5Q0FBNEM7QUFFNUMsOENBQWtEO0FBQ2xELHlEQUFpRDtBQUNqRCwyQ0FBNEI7QUFDNUIscUNBQXNDO0FBQy9CLE1BQU0sTUFBTSxHQUFHLEtBQUssRUFBRSxJQUFlLEVBQW1ELEVBQUU7SUFDN0YsTUFBTSxLQUFLLEdBQUcsTUFBTSxJQUFBLG9CQUFZLEVBQUMsSUFBSSxDQUFDLENBQUE7SUFDdEMsT0FBTztRQUNILElBQUksRUFBRSxNQUFNO1FBQ1osT0FBTyxFQUFFLEtBQUssSUFBSSxFQUFFO0tBQ3ZCLENBQUE7QUFDTCxDQUFDLENBQUE7QUFOWSxRQUFBLE1BQU0sVUFNbEI7QUFDTSxNQUFNLFdBQVcsR0FBRyxLQUFLLEVBQUUsSUFBZSxFQUFtRCxFQUFFO0lBQ2xHLE1BQU0sZUFBZSxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBQSxxQkFBTyxFQUFDLElBQUksQ0FBQyxXQUFXLEVBQUUsS0FBSyxFQUFFLElBQUEsY0FBUSxHQUFFLENBQUMsQ0FBQyxDQUFBO0lBQ2xGLE1BQU0sV0FBVyxHQUFHLElBQUEsV0FBSSxFQUFDLGVBQWUsRUFBRSxRQUFRLENBQVcsQ0FBQTtJQUM3RCxPQUFPO1FBQ0gsSUFBSSxFQUFFLE1BQU07UUFDWixPQUFPLEVBQUUsc0JBQXNCLFdBQVcsRUFBRSxJQUFJLEVBQUU7S0FDckQsQ0FBQTtBQUNMLENBQUMsQ0FBQTtBQVBZLFFBQUEsV0FBVyxlQU92QiJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvbXB0LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsic3JjL3Byb21wdC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7QUFDQSx5Q0FBNEM7QUFFNUMsOENBQWtEO0FBQ2xELGtEQUFzRDtBQUN0RCx5REFBaUQ7QUFDakQsMkNBQTRCO0FBQzVCLHFDQUFzQztBQUN0QywyQ0FBbUQ7QUFDNUMsTUFBTSxNQUFNLEdBQUcsS0FBSyxFQUFFLElBQWUsRUFBbUQsRUFBRTtJQUM3RixNQUFNLEtBQUssR0FBRyxNQUFNLElBQUEsb0JBQVksRUFBQyxJQUFJLENBQUMsQ0FBQTtJQUN0QyxPQUFPO1FBQ0gsSUFBSSxFQUFFLE1BQU07UUFDWixPQUFPLEVBQUUsS0FBSyxJQUFJLEVBQUU7S0FDdkIsQ0FBQTtBQUNMLENBQUMsQ0FBQTtBQU5ZLFFBQUEsTUFBTSxVQU1sQjtBQUNNLE1BQU0sV0FBVyxHQUFHLEtBQUssRUFBRSxJQUFlLEVBQW1ELEVBQUU7SUFDbEcsSUFBSSxlQUFlLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxJQUFBLHFCQUFPLEVBQUMsSUFBSSxDQUFDLFdBQVcsRUFBRSxLQUFLLEVBQUUsSUFBQSxjQUFRLEdBQUUsQ0FBQyxDQUFDLENBQUE7SUFDaEYsSUFBRyxDQUFDLElBQUEsYUFBTSxFQUFDLGVBQWUsQ0FBQyxFQUFFLENBQUM7UUFDMUIsZUFBZSxHQUFJLElBQUksQ0FBQyxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsR0FBRyxFQUFFLEVBQUcsaUNBQXFCLENBQUMsQ0FBQyxDQUFBO0lBQ3JGLENBQUM7SUFDRCxNQUFNLFdBQVcsR0FBRyxJQUFBLFdBQUksRUFBQyxlQUFlLEVBQUUsUUFBUSxDQUFXLENBQUE7SUFDN0QsT0FBTztRQUNILElBQUksRUFBRSxNQUFNO1FBQ1osT0FBTyxFQUFFLHNCQUFzQixXQUFXLEVBQUUsSUFBSSxFQUFFO0tBQ3JELENBQUE7QUFDTCxDQUFDLENBQUE7QUFWWSxRQUFBLFdBQVcsZUFVdkIifQ==

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,6 +1,7 @@
npm run webpack
#!/bin/sh
kbotd types
kbotd fetch
npm run webpack
sh scripts/update-readme.sh
cp README.md dist/README.md
cd dist

View File

@ -5,7 +5,10 @@ export const EXCLUDE_GLOB = [
"**/build/**",
"**/coverage/**",
"*.log",
".kbot",
`.${MODULE_NAME}`,
".git"
]
export const MAX_FILE_SIZE = 1024 * 1024 * 2
export const MAX_FILE_SIZE = 1024 * 1024 * 2
export const API_PREFIX = 'osr'
export const PREFERENCES_FILE_NAME = 'preferences.md'
export const LOGGING_DIRECTORY = `.${MODULE_NAME}`

View File

@ -2,9 +2,11 @@ import { IKBotTask } from '@plastichub/osr-ai-tools/types'
import { resolveQuery } from './utils/input'
import { ChatCompletionMessageParam } from 'openai/resources/index.mjs'
import { sync as read } from '@plastichub/fs/read'
import { sync as exists } from '@plastichub/fs/exists'
import { resolve } from '@plastichub/osr-commons'
import * as path from 'path'
import { env_vars } from './utils/env'
import { PREFERENCES_FILE_NAME } from './constants'
export const prompt = async (opts: IKBotTask): Promise<ChatCompletionMessageParam | undefined> => {
const input = await resolveQuery(opts)
return {
@ -13,7 +15,10 @@ export const prompt = async (opts: IKBotTask): Promise<ChatCompletionMessagePara
}
}
export const preferences = async (opts: IKBotTask): Promise<ChatCompletionMessageParam | undefined> => {
const preferencesPath = path.resolve(resolve(opts.preferences, false, env_vars()))
let preferencesPath = path.resolve(resolve(opts.preferences, false, env_vars()))
if(!exists(preferencesPath)) {
preferencesPath = path.resolve(path.join(process.cwd(), PREFERENCES_FILE_NAME))
}
const preferences = read(preferencesPath, 'string') as string
return {
role: "user",

View File

@ -1,7 +1,16 @@
import { z } from 'zod'
import * as path from 'path'
import chalk from 'chalk'
import * as env from 'env-var'
import { resolve } from '@plastichub/osr-commons'
import { API_PREFIX, LOGGING_DIRECTORY, PREFERENCES_FILE_NAME } from './constants'
export const get_var = (key: string ='') => env.get(key).asString() || env.get(key.replace(/-/g, '_')).asString() || env.get(key.replace(/_/g, '-')).asString()
export const HOME = (sub = '') => path.join(process.env[(process.platform == 'win32') ? 'USERPROFILE' : 'HOME'], sub)
export const PREFERENCES_DEFAULT = (key: string = 'KBOT_PREFERENCES') => get_var(key) || path.join(HOME(`.${API_PREFIX}`), PREFERENCES_FILE_NAME)
import { models_dist } from './models'
import { defaultTemplate } from './tools'
import { generate_interfaces, write, ZodMetaMap } from '@plastichub/osr-commons'
@ -50,7 +59,7 @@ export const SplitType = z.enum([
export type OptionsSchemaMeta = Record<string, unknown>
let schemaMap: ZodMetaMap = null
let schemaMap: ZodMetaMap
export const OptionsSchema = (opts?: any) => {
schemaMap = ZodMetaMap.create<OptionsSchemaMeta>()
@ -177,13 +186,13 @@ export const OptionsSchema = (opts?: any) => {
.add(
'preferences',
z.string()
.default('./.kbot/preferences.md')
.default(PREFERENCES_DEFAULT())
.describe('Path to preferences file, eg: location, your email address, gender, etc. Supports environment variables.')
)
.add(
'logs',
z.string()
.default('./.kbot')
.default(LOGGING_DIRECTORY)
.describe('Logging directory')
)
.add(

View File

@ -31,7 +31,6 @@ export interface IKBotOptions {
aion-labs/aion-1.0 | paid
aion-labs/aion-1.0-mini | paid
aion-labs/aion-rp-llama-3.1-8b | paid
jondurbin/airoboros-l2-70b | paid
amazon/nova-lite-v1 | paid
amazon/nova-micro-v1 | paid
amazon/nova-pro-v1 | paid
@ -72,12 +71,12 @@ export interface IKBotOptions {
deepseek/deepseek-r1:free | free
deepseek/deepseek-r1-distill-llama-70b | paid
deepseek/deepseek-r1-distill-llama-70b:free | free
deepseek/deepseek-r1-distill-llama-8b | paid
deepseek/deepseek-r1-distill-qwen-1.5b | paid
deepseek/deepseek-r1-distill-qwen-14b | paid
deepseek/deepseek-r1-distill-qwen-32b | paid
cognitivecomputations/dolphin-mixtral-8x7b | paid
cognitivecomputations/dolphin-mixtral-8x22b | paid
cognitivecomputations/dolphin3.0-mistral-24b:free | free
cognitivecomputations/dolphin3.0-r1-mistral-24b:free | free
eva-unit-01/eva-llama-3.33-70b | paid
eva-unit-01/eva-qwen-2.5-32b | paid
eva-unit-01/eva-qwen-2.5-72b | paid
@ -113,6 +112,7 @@ export interface IKBotOptions {
liquid/lfm-40b | paid
liquid/lfm-7b | paid
allenai/llama-3.1-tulu-3-405b | paid
meta-llama/llama-guard-3-8b | paid
alpindale/magnum-72b | paid
anthracite-org/magnum-v2-72b | paid
anthracite-org/magnum-v4-72b | paid
@ -140,7 +140,6 @@ export interface IKBotOptions {
microsoft/phi-3-mini-128k-instruct | paid
microsoft/phi-3-mini-128k-instruct:free | free
microsoft/phi-3.5-mini-128k-instruct | paid
sophosympatheia/midnight-rose-70b | paid
minimax/minimax-01 | paid
mistralai/mistral-large | paid
mistralai/mistral-large-2407 | paid
@ -158,12 +157,15 @@ export interface IKBotOptions {
mistralai/mistral-7b-instruct-v0.1 | paid
mistralai/mistral-7b-instruct-v0.3 | paid
mistralai/mistral-nemo | paid
mistralai/mistral-nemo:free | free
mistralai/mistral-small-24b-instruct-2501 | paid
mistralai/mistral-small-24b-instruct-2501:free | free
mistralai/mixtral-8x22b-instruct | paid
mistralai/mixtral-8x7b | paid
mistralai/mixtral-8x7b-instruct | paid
mistralai/pixtral-12b | paid
mistralai/pixtral-large-2411 | paid
mistralai/mistral-saba | paid
gryphe/mythomax-l2-13b | paid
gryphe/mythomax-l2-13b:free | free
neversleep/llama-3-lumimaid-70b | paid
@ -206,9 +208,9 @@ export interface IKBotOptions {
openai/o1-preview | paid
openai/o1-preview-2024-09-12 | paid
openai/o3-mini | paid
openai/o3-mini-high | paid
openchat/openchat-7b | paid
openchat/openchat-7b:free | free
teknium/openhermes-2.5-mistral-7b | paid
perplexity/llama-3.1-sonar-huge-128k-online | paid
perplexity/llama-3.1-sonar-large-128k-chat | paid
perplexity/llama-3.1-sonar-large-128k-online | paid
@ -218,8 +220,6 @@ export interface IKBotOptions {
perplexity/sonar-reasoning | paid
pygmalionai/mythalion-13b | paid
qwen/qwen-2-72b-instruct | paid
qwen/qwen-2-7b-instruct | paid
qwen/qwen-2-7b-instruct:free | free
qwen/qvq-72b-preview | paid
qwen/qwen-vl-plus:free | free
qwen/qwen-max | paid

File diff suppressed because one or more lines are too long