maintenance love :)

babayaga 2025-09-13 12:12:13 +02:00
parent c32511c3ab
commit de3a0275c1
12 changed files with 1294 additions and 772 deletions


@@ -1,5 +1,5 @@
{
"timestamp": 1757014666958,
"timestamp": 1757758318142,
"models": [
{
"id": "gpt-4-0613",

File diff suppressed because it is too large


@@ -1,4 +1,7 @@
export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
@@ -30,7 +33,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
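
For downstream consumers, the regenerated E_OPENROUTER_MODEL_FREE enum is the current list of free model ids. A minimal TypeScript sketch of validating configured ids against it, assuming the enum is re-exported from the package root (the import path and the config-driven list are illustrative assumptions, not part of this commit):

import { E_OPENROUTER_MODEL_FREE } from "@plastichub/kbot"; // assumed export path

// String enum: Object.values() yields the raw OpenRouter model ids.
const freeModelIds = new Set<string>(Object.values(E_OPENROUTER_MODEL_FREE));

// Preference-ordered ids, e.g. read from user config.
const preferred: string[] = [
  "nvidia/nemotron-nano-9b-v2:free",   // added in this commit
  "google/gemini-2.5-pro-exp-03-25",   // dropped in this commit, filtered out below
  "deepseek/deepseek-chat-v3.1:free",
];

// Keep only ids the regenerated enum still contains.
const usable = preferred.filter((id) => freeModelIds.has(id));
console.log("usable free models:", usable);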


@@ -1,5 +1,8 @@
export var E_OPENROUTER_MODEL_FREE;
(function (E_OPENROUTER_MODEL_FREE) {
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE"] = "nvidia/nemotron-nano-9b-v2:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA"] = "openrouter/sonoma-dusk-alpha";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA"] = "openrouter/sonoma-sky-alpha";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE"] = "deepseek/deepseek-chat-v3.1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_120B_FREE"] = "openai/gpt-oss-120b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_20B_FREE"] = "openai/gpt-oss-20b:free";
@@ -31,7 +34,6 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25"] = "google/gemini-2.5-pro-exp-03-25";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free";
@@ -58,4 +60,4 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free";
})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {}));
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkEwRFg7QUExREQsV0FBWSx1QkFBdUI7SUFDakMsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQTFEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBMERsQyJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkE0RFg7QUE1REQsV0FBWSx1QkFBdUI7SUFDakMseUdBQThFLENBQUE7SUFDOUUsbUdBQXdFLENBQUE7SUFDeEUsaUdBQXNFLENBQUE7SUFDdEUsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQTVEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBNERsQyJ9


@@ -1,6 +1,19 @@
export declare enum E_OPENROUTER_MODEL {
MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct",
MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat",
MODEL_QWEN_QWEN_PLUS_2025_07_28 = "qwen/qwen-plus-2025-07-28",
MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2",
MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905",
MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b",
MODEL_STEPFUN_AI_STEP3 = "stepfun-ai/step3",
MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507 = "qwen/qwen3-30b-a3b-thinking-2507",
MODEL_X_AI_GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1",
MODEL_NOUSRESEARCH_HERMES_4_70B = "nousresearch/hermes-4-70b",
@@ -128,14 +141,13 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking",
MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta",
MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta",
MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1 = "nvidia/llama-3.3-nemotron-super-49b-v1",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1",
MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick",
MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_META_LLAMA_LLAMA_4_SCOUT = "meta-llama/llama-4-scout",
MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_ALLENAI_MOLMO_7B_D = "allenai/molmo-7b-d",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT = "qwen/qwen2.5-vl-32b-instruct",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
@@ -143,6 +155,7 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_OPENAI_O1_PRO = "openai/o1-pro",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct",
MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct",
MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it",
MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
@@ -219,38 +232,36 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411",
MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407",
MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411",
MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta",
MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b",
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free",
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct",
MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b",
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
@@ -260,9 +271,9 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
@@ -275,9 +286,9 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
@@ -295,29 +306,28 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_COHERE_COMMAND = "cohere/command",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
MODEL_OPENROUTER_AUTO = "openrouter/auto",
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k",
MODEL_MANCER_WEAVER = "mancer/weaver",
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
}
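
Many model ids in E_OPENROUTER_MODEL appear in pairs, a paid id plus a ":free" variant (for example nvidia/nemotron-nano-9b-v2 and nvidia/nemotron-nano-9b-v2:free, both added here). A short sketch of mapping a model to its free variant, assuming both enums are exported from the package root; the import path and the toFreeVariant helper are illustrative assumptions:

import { E_OPENROUTER_MODEL, E_OPENROUTER_MODEL_FREE } from "@plastichub/kbot"; // assumed export path

const FREE_IDS = new Set<string>(Object.values(E_OPENROUTER_MODEL_FREE));

// Hypothetical helper: return the ":free" variant of a model id when the free
// enum contains one, otherwise undefined.
function toFreeVariant(model: E_OPENROUTER_MODEL): string | undefined {
  const candidate = model.endsWith(":free") ? model : `${model}:free`;
  return FREE_IDS.has(candidate) ? candidate : undefined;
}

console.log(toFreeVariant(E_OPENROUTER_MODEL.MODEL_NVIDIA_NEMOTRON_NANO_9B_V2));
// -> "nvidia/nemotron-nano-9b-v2:free"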

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
"version": "1.1.50",
"version": "1.1.51",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
"version": "1.1.50",
"version": "1.1.51",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"


@@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
"version": "1.1.50",
"version": "1.1.51",
"main": "main_node.js",
"author": "",
"license": "ISC",


@@ -1,4 +1,7 @@
export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
@@ -30,7 +33,6 @@ export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",


@@ -1,6 +1,19 @@
export enum E_OPENROUTER_MODEL {
MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct",
MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat",
MODEL_QWEN_QWEN_PLUS_2025_07_28 = "qwen/qwen-plus-2025-07-28",
MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2",
MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905",
MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe",
MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b",
MODEL_STEPFUN_AI_STEP3 = "stepfun-ai/step3",
MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507 = "qwen/qwen3-30b-a3b-thinking-2507",
MODEL_X_AI_GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1",
MODEL_NOUSRESEARCH_HERMES_4_70B = "nousresearch/hermes-4-70b",
@@ -128,14 +141,13 @@ export enum E_OPENROUTER_MODEL {
MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking",
MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta",
MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta",
MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1 = "nvidia/llama-3.3-nemotron-super-49b-v1",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1",
MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick",
MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_META_LLAMA_LLAMA_4_SCOUT = "meta-llama/llama-4-scout",
MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25",
MODEL_ALLENAI_MOLMO_7B_D = "allenai/molmo-7b-d",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT = "qwen/qwen2.5-vl-32b-instruct",
MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free",
@@ -143,6 +155,7 @@ export enum E_OPENROUTER_MODEL {
MODEL_OPENAI_O1_PRO = "openai/o1-pro",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct",
MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct",
MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it",
MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
@@ -219,38 +232,36 @@ export enum E_OPENROUTER_MODEL {
MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411",
MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407",
MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411",
MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta",
MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b",
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free",
MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct",
MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b",
MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku",
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_OPENAI_O1_MINI = "openai/o1-mini",
MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12",
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
@@ -260,9 +271,9 @@ export enum E_OPENROUTER_MODEL {
MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b",
MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06",
MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b",
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
@@ -275,9 +286,9 @@ export enum E_OPENROUTER_MODEL {
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
@@ -295,29 +306,28 @@ export enum E_OPENROUTER_MODEL {
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_COHERE_COMMAND = "cohere/command",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
MODEL_OPENROUTER_AUTO = "openrouter/auto",
MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct",
MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1",
MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k",
MODEL_MANCER_WEAVER = "mancer/weaver",
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
}