From e8af7b8dd0c47b5886a45ad7f80ab5090fcf6bbe Mon Sep 17 00:00:00 2001 From: Babayaga Date: Wed, 4 Feb 2026 16:25:59 +0100 Subject: [PATCH] maintenance love:) --- packages/kbot/dist-in/data/openai_models.json | 2 +- .../kbot/dist-in/data/openrouter_models.json | 2595 ++++++----------- .../dist-in/src/models/cache/openrouter.ts | 2 +- packages/kbot/package-lock.json | 4 +- packages/kbot/package.json | 4 +- packages/kbot/schema.json | 4 +- packages/kbot/schema_ui.json | 4 +- .../models/cache/openrouter-models-free.ts | 21 +- .../src/models/cache/openrouter-models.ts | 32 +- packages/kbot/src/zod_types.ts | 472 ++- 10 files changed, 1290 insertions(+), 1850 deletions(-) diff --git a/packages/kbot/dist-in/data/openai_models.json b/packages/kbot/dist-in/data/openai_models.json index c0ad756a..03054cf7 100644 --- a/packages/kbot/dist-in/data/openai_models.json +++ b/packages/kbot/dist-in/data/openai_models.json @@ -1,5 +1,5 @@ { - "timestamp": 1769293001025, + "timestamp": 1770218751214, "models": [ { "id": "gpt-4-0613", diff --git a/packages/kbot/dist-in/data/openrouter_models.json b/packages/kbot/dist-in/data/openrouter_models.json index b3b2ac60..43196ffb 100644 --- a/packages/kbot/dist-in/data/openrouter_models.json +++ b/packages/kbot/dist-in/data/openrouter_models.json @@ -1,6 +1,310 @@ { - "timestamp": 1769293001166, + "timestamp": 1770218751571, "models": [ + { + "id": "qwen/qwen3-coder-next", + "canonical_slug": "qwen/qwen3-coder-next-2025-02-03", + "hugging_face_id": "Qwen/Qwen3-Coder-Next", + "name": "Qwen: Qwen3 Coder Next", + "created": 1770164101, + "description": "Qwen3-Coder-Next is an open-weight causal language model optimized for coding agents and local development workflows. It uses a sparse MoE design with 80B total parameters and only 3B activated per token, delivering performance comparable to models with 10 to 20x higher active compute, which makes it well suited for cost-sensitive, always-on agent deployment.\n\nThe model is trained with a strong agentic focus and performs reliably on long-horizon coding tasks, complex tool usage, and recovery from execution failures. With a native 256k context window, it integrates cleanly into real-world CLI and IDE environments and adapts well to common agent scaffolds used by modern coding tools. The model operates exclusively in non-thinking mode and does not emit <think> blocks, simplifying integration for production coding agents.", + "context_length": 262144, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000002", + "completion": "0.0000015" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": 65536, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ], + "default_parameters": { + "temperature": 1, + "top_p": 0.95, + "frequency_penalty": null + }, + "expiration_date": null + }, + { + "id": "openrouter/free", + "canonical_slug": "openrouter/free", + "hugging_face_id": "", + "name": "Free Models Router", + "created": 1769917427, + "description": "The simplest way to get free inference. 
openrouter/free is a router that selects free models at random from the models available on OpenRouter. The router smartly filters for models that support features needed for your request such as image understanding, tool calling, structured outputs and more. ", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Router", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0" + }, + "top_provider": { + "context_length": null, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ], + "default_parameters": { + "temperature": null, + "top_p": null, + "frequency_penalty": null + }, + "expiration_date": null + }, + { + "id": "stepfun/step-3.5-flash:free", + "canonical_slug": "stepfun/step-3.5-flash", + "hugging_face_id": "stepfun-ai/Step-3.5-Flash", + "name": "StepFun: Step 3.5 Flash (free)", + "created": 1769728337, + "description": "Step 3.5 Flash is StepFun's most capable open-source foundation model. Built on a sparse Mixture of Experts (MoE) architecture, it selectively activates only 11B of its 196B parameters per token. It is a reasoning model that is incredibly speed efficient even at long contexts.", + "context_length": 256000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0" + }, + "top_provider": { + "context_length": 256000, + "max_completion_tokens": 256000, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "max_tokens", + "reasoning", + "stop", + "temperature", + "tools", + "top_p" + ], + "default_parameters": { + "temperature": null, + "top_p": null, + "frequency_penalty": null + }, + "expiration_date": null + }, + { + "id": "arcee-ai/trinity-large-preview:free", + "canonical_slug": "arcee-ai/trinity-large-preview", + "hugging_face_id": "arcee-ai/Trinity-Large-Preview", + "name": "Arcee AI: Trinity Large Preview (free)", + "created": 1769552670, + "description": "Trinity-Large-Preview is a frontier-scale open-weight language model from Arcee, built as a 400B-parameter sparse Mixture-of-Experts with 13B active parameters per token using 4-of-256 expert routing. \n\nIt excels in creative writing, storytelling, role-play, chat scenarios, and real-time voice assistance, better than your average reasoning model usually can. But we’re also introducing some of our newer agentic performance. It was trained to navigate well in agent harnesses like OpenCode, Cline, and Kilo Code, and to handle complex toolchains and long, constraint-filled prompts. \n\nThe architecture natively supports very long context windows up to 512k tokens, with the Preview API currently served at 128k context using 8-bit quantization for practical deployment. 
Trinity-Large-Preview reflects Arcee’s efficiency-first design philosophy, offering a production-oriented frontier model with open weights and permissive licensing suitable for real-world applications and experimentation.", + "context_length": 131000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0" + }, + "top_provider": { + "context_length": 131000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "response_format", + "structured_outputs", + "temperature", + "tools", + "top_k", + "top_p" + ], + "default_parameters": { + "temperature": 0.8, + "top_p": 0.8, + "frequency_penalty": null + }, + "expiration_date": null + }, + { + "id": "moonshotai/kimi-k2.5", + "canonical_slug": "moonshotai/kimi-k2.5-0127", + "hugging_face_id": "moonshotai/Kimi-K2.5", + "name": "MoonshotAI: Kimi K2.5", + "created": 1769487076, + "description": "Kimi K2.5 is Moonshot AI's native multimodal model, delivering state-of-the-art visual coding capability and a self-directed agent swarm paradigm. Built on Kimi K2 with continued pretraining over approximately 15T mixed visual and text tokens, it delivers strong performance in general reasoning, visual coding, and agentic tool-calling.", + "context_length": 262144, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000045", + "completion": "0.0000025" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": 65535, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ], + "default_parameters": { + "temperature": null, + "top_p": null, + "frequency_penalty": null + }, + "expiration_date": null + }, + { + "id": "upstage/solar-pro-3:free", + "canonical_slug": "upstage/solar-pro-3", + "hugging_face_id": "", + "name": "Upstage: Solar Pro 3 (free)", + "created": 1769481200, + "description": "Solar Pro 3 is Upstage's powerful Mixture-of-Experts (MoE) language model. With 102B total parameters and 12B active parameters per forward pass, it delivers exceptional performance while maintaining computational efficiency. 
Optimized for Korean with English and Japanese support.", + "context_length": 128000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "include_reasoning", + "max_tokens", + "reasoning", + "response_format", + "structured_outputs", + "temperature", + "tool_choice", + "tools" + ], + "default_parameters": { + "temperature": null, + "top_p": null, + "frequency_penalty": null + }, + "expiration_date": "2026-03-02" + }, { "id": "minimax/minimax-m2-her", "canonical_slug": "minimax/minimax-m2-her-20260123", @@ -8,7 +312,7 @@ "name": "MiniMax: MiniMax M2-her", "created": 1769177239, "description": "MiniMax M2-her is a dialogue-first large language model built for immersive roleplay, character-driven chat, and expressive multi-turn conversations. Designed to stay consistent in tone and personality, it supports rich message roles (user_system, group, sample_message_user, sample_message_ai) and can learn from example dialogue to better match the style and pacing of your scenario, making it a strong choice for storytelling, companions, and conversational experiences where natural flow and vivid interaction matter most.", - "context_length": 32768, + "context_length": 65536, "architecture": { "modality": "text->text", "input_modalities": [ @@ -23,15 +327,10 @@ "pricing": { "prompt": "0.0000003", "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.00000003", - "input_cache_write": "0.000000375" + "input_cache_read": "0.00000003" }, "top_provider": { - "context_length": 32768, + "context_length": 65536, "max_completion_tokens": 2048, "is_moderated": false }, @@ -69,11 +368,7 @@ }, "pricing": { "prompt": "0.0000006", - "completion": "0.000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000006" }, "top_provider": { "context_length": 1040000, @@ -321,10 +616,6 @@ "pricing": { "prompt": "0.00000007", "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.00000001" }, "top_provider": { @@ -436,11 +727,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 36864, @@ -489,11 +776,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 65536, @@ -547,13 +830,7 @@ }, "pricing": { "prompt": "0.000000075", - "completion": "0.0000003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0", - "input_cache_write": "0" + "completion": "0.0000003" }, "top_provider": { "context_length": 262144, @@ -604,13 +881,7 @@ }, "pricing": { "prompt": "0.00000025", - "completion": "0.000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0", - "input_cache_write": "0" + "completion": "0.000002" }, "top_provider": { "context_length": 262144, @@ 
-716,11 +987,7 @@ }, "pricing": { "prompt": "0.0000004", - "completion": "0.0000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000015" }, "top_provider": { "context_length": 202752, @@ -735,8 +1002,10 @@ "logprobs", "max_tokens", "min_p", + "parallel_tool_calls", "presence_penalty", "reasoning", + "reasoning_effort", "repetition_penalty", "response_format", "seed", @@ -876,11 +1145,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000005" }, "top_provider": { "context_length": 65536, @@ -911,59 +1176,6 @@ }, "expiration_date": null }, - { - "id": "xiaomi/mimo-v2-flash:free", - "canonical_slug": "xiaomi/mimo-v2-flash-20251210", - "hugging_face_id": "XiaomiMiMo/MiMo-V2-Flash", - "name": "Xiaomi: MiMo-V2-Flash (free)", - "created": 1765731308, - "description": "MiMo-V2-Flash is an open-source foundation language model developed by Xiaomi. It is a Mixture-of-Experts model with 309B total parameters and 15B active parameters, adopting hybrid attention architecture. MiMo-V2-Flash supports a hybrid-thinking toggle and a 256K context window, and excels at reasoning, coding, and agent scenarios. On SWE-bench Verified and SWE-bench Multilingual, MiMo-V2-Flash ranks as the top #1 open-source model globally, delivering performance comparable to Claude Sonnet 4.5 while costing only about 3.5% as much.\n\nUsers can control the reasoning behaviour with the `reasoning` `enabled` boolean. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#enable-reasoning-with-default-config).", - "context_length": 262144, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 262144, - "max_completion_tokens": 65536, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "include_reasoning", - "max_tokens", - "presence_penalty", - "reasoning", - "response_format", - "stop", - "temperature", - "tool_choice", - "tools", - "top_p" - ], - "default_parameters": { - "temperature": null, - "top_p": 0.95, - "frequency_penalty": null - }, - "expiration_date": "2026-01-26" - }, { "id": "xiaomi/mimo-v2-flash", "canonical_slug": "xiaomi/mimo-v2-flash-20251210", @@ -985,11 +1197,7 @@ }, "pricing": { "prompt": "0.00000009", - "completion": "0.00000029", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000029" }, "top_provider": { "context_length": 262144, @@ -1042,11 +1250,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 256000, @@ -1091,16 +1295,12 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000006", - "completion": "0.00000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "prompt": "0.00000005", + "completion": "0.0000002" }, "top_provider": { "context_length": 262144, - "max_completion_tokens": 262144, + "max_completion_tokens": null, "is_moderated": false }, "per_request_limits": null, @@ -1276,55 +1476,6 @@ 
}, "expiration_date": null }, - { - "id": "mistralai/devstral-2512:free", - "canonical_slug": "mistralai/devstral-2512", - "hugging_face_id": "mistralai/Devstral-2-123B-Instruct-2512", - "name": "Mistral: Devstral 2 2512 (free)", - "created": 1765285419, - "description": "Devstral 2 is a state-of-the-art open-source model by Mistral AI specializing in agentic coding. It is a 123B-parameter dense transformer model supporting a 256K context window.\n\nDevstral 2 supports exploring codebases and orchestrating changes across multiple files while maintaining architecture-level context. It tracks framework dependencies, detects failures, and retries with corrections—solving challenges like bug fixing and modernizing legacy systems. The model can be fine-tuned to prioritize specific languages or optimize for large enterprise codebases. It is available under a modified MIT license.", - "context_length": 262144, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0" - }, - "top_provider": { - "context_length": 262144, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_p" - ], - "default_parameters": { - "temperature": 0.3, - "top_p": null, - "frequency_penalty": null - }, - "expiration_date": "2026-01-27" - }, { "id": "mistralai/devstral-2512", "canonical_slug": "mistralai/devstral-2512", @@ -1346,11 +1497,7 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000022" }, "top_provider": { "context_length": 262144, @@ -1448,12 +1595,7 @@ }, "pricing": { "prompt": "0.0000003", - "completion": "0.0000009", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0" + "completion": "0.0000009" }, "top_provider": { "context_length": 131072, @@ -1508,13 +1650,7 @@ }, "pricing": { "prompt": "0.00000027", - "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0", - "input_cache_write": "0" + "completion": "0.000001" }, "top_provider": { "context_length": 131072, @@ -1560,11 +1696,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.00000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000015" }, "top_provider": { "context_length": 32768, @@ -1703,11 +1835,7 @@ }, "pricing": { "prompt": "0.0000003", - "completion": "0.0000025", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000025" }, "top_provider": { "context_length": 1000000, @@ -1958,11 +2086,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 131072, @@ -2010,12 +2134,7 @@ }, "pricing": { "prompt": "0.000000045", - "completion": "0.00000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0" + "completion": "0.00000015" }, "top_provider": { "context_length": 
131072, @@ -2069,11 +2188,7 @@ }, "pricing": { "prompt": "0.00000027", - "completion": "0.00000041", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000041" }, "top_provider": { "context_length": 163840, @@ -2125,11 +2240,7 @@ }, "pricing": { "prompt": "0.00000025", - "completion": "0.00000038", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000038" }, "top_provider": { "context_length": 163840, @@ -2186,11 +2297,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000011" }, "top_provider": { "context_length": 131072, @@ -2244,11 +2351,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 163840, @@ -2301,11 +2404,7 @@ }, "pricing": { "prompt": "0.00000025", - "completion": "0.00000085", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000085" }, "top_provider": { "context_length": 163840, @@ -2412,11 +2511,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000005" }, "top_provider": { "context_length": 65536, @@ -2468,11 +2563,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 65536, @@ -2522,11 +2613,7 @@ }, "pricing": { "prompt": "0.00000012", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 65536, @@ -2634,10 +2721,7 @@ "pricing": { "prompt": "0.0000002", "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000005" }, "top_provider": { @@ -2746,11 +2830,7 @@ }, "pricing": { "prompt": "0.00000125", - "completion": "0.00000125", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000125" }, "top_provider": { "context_length": 128000, @@ -2997,10 +3077,6 @@ "pricing": { "prompt": "0.000000207", "completion": "0.000000828", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.0000000414" }, "top_provider": { @@ -3052,11 +3128,7 @@ }, "pricing": { "prompt": "0.0000004", - "completion": "0.00000175", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000175" }, "top_provider": { "context_length": 262144, @@ -3115,10 +3187,6 @@ "pricing": { "prompt": "0.0000025", "completion": "0.0000125", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.000000625" }, "top_provider": { @@ -3268,10 +3336,6 @@ "pricing": { "prompt": "0.000000075", "completion": "0.0000003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.000000037" }, "top_provider": { @@ -3322,11 +3386,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - 
"internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 128000, @@ -3374,11 +3434,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 131072, @@ -3428,12 +3484,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000002", + "prompt": "0.000000255", "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.00000003" }, "top_provider": { @@ -3446,7 +3498,6 @@ "frequency_penalty", "include_reasoning", "max_tokens", - "min_p", "presence_penalty", "reasoning", "repetition_penalty", @@ -3489,11 +3540,7 @@ }, "pricing": { "prompt": "0.0000005", - "completion": "0.0000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000015" }, "top_provider": { "context_length": 262144, @@ -3639,11 +3686,7 @@ }, "pricing": { "prompt": "0.000000017", - "completion": "0.00000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000011" }, "top_provider": { "context_length": 131000, @@ -3689,11 +3732,7 @@ }, "pricing": { "prompt": "0.0000035", - "completion": "0.0000035", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000035" }, "top_provider": { "context_length": 32768, @@ -3815,7 +3854,7 @@ "top_provider": { "context_length": 200000, "max_completion_tokens": 64000, - "is_moderated": true + "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ @@ -3911,12 +3950,7 @@ }, "pricing": { "prompt": "0.00000008", - "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0" + "completion": "0.0000005" }, "top_provider": { "context_length": 131072, @@ -4144,11 +4178,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000004" }, "top_provider": { "context_length": 131072, @@ -4197,11 +4227,7 @@ }, "pricing": { "prompt": "0.00000007", - "completion": "0.00000028", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000028" }, "top_provider": { "context_length": 131072, @@ -4302,12 +4328,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0" + "completion": "0.000001" }, "top_provider": { "context_length": 131072, @@ -4361,10 +4382,7 @@ "pricing": { "prompt": "0.00000015", "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "input_cache_read": "0.000000075" }, "top_provider": { "context_length": 262144, @@ -4468,11 +4486,7 @@ }, "pricing": { "prompt": "0.00000035", - "completion": "0.0000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000015" }, "top_provider": { "context_length": 202752, @@ -4531,10 +4545,7 @@ "pricing": { "prompt": "0.00000044", "completion": "0.00000176", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "input_cache_read": "0.00000011" }, "top_provider": { "context_length": 204800, @@ -4597,7 +4608,7 @@ 
"top_provider": { "context_length": 1000000, "max_completion_tokens": 64000, - "is_moderated": true + "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ @@ -4640,12 +4651,8 @@ "instruct_type": "deepseek-v3.1" }, "pricing": { - "prompt": "0.00000021", - "completion": "0.00000032", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "prompt": "0.00000027", + "completion": "0.00000041" }, "top_provider": { "context_length": 163840, @@ -4698,11 +4705,7 @@ }, "pricing": { "prompt": "0.0000003", - "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000005" }, "top_provider": { "context_length": 131072, @@ -4752,11 +4755,7 @@ }, "pricing": { "prompt": "0.00000085", - "completion": "0.00000125", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000125" }, "top_provider": { "context_length": 256000, @@ -4810,7 +4809,7 @@ }, "top_provider": { "context_length": 1048576, - "max_completion_tokens": 65535, + "max_completion_tokens": 65536, "is_moderated": false }, "per_request_limits": null, @@ -4914,11 +4913,7 @@ }, "pricing": { "prompt": "0.00000045", - "completion": "0.0000035", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000035" }, "top_provider": { "context_length": 262144, @@ -4972,11 +4967,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 262144, @@ -5183,10 +5174,6 @@ "pricing": { "prompt": "0.00000021", "completion": "0.00000079", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.000000168" }, "top_provider": { @@ -5242,10 +5229,6 @@ "pricing": { "prompt": "0.00000021", "completion": "0.00000079", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.000000168" }, "top_provider": { @@ -5302,10 +5285,7 @@ "pricing": { "prompt": "0.0000002", "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000005" }, "top_provider": { @@ -5462,11 +5442,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.00000039", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000039" }, "top_provider": { "context_length": 32768, @@ -5511,11 +5487,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 128000, @@ -5570,11 +5542,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 262144, @@ -5619,11 +5587,7 @@ }, "pricing": { "prompt": "0.00000009", - "completion": "0.0000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000011" }, "top_provider": { "context_length": 262144, @@ -5673,14 +5637,11 @@ "pricing": { "prompt": "0.0000002", "completion": "0.0000008", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "input_cache_read": 
"0.0000002" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": 131072, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -5817,11 +5778,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 128000, @@ -5865,11 +5822,7 @@ }, "pricing": { "prompt": "0.00000004", - "completion": "0.00000016", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000016" }, "top_provider": { "context_length": 131072, @@ -5919,11 +5872,7 @@ }, "pricing": { "prompt": "0.00000039", - "completion": "0.0000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000019" }, "top_provider": { "context_length": 262144, @@ -5974,11 +5923,7 @@ }, "pricing": { "prompt": "0.0000006", - "completion": "0.0000025", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000025" }, "top_provider": { "context_length": 262144, @@ -6023,11 +5968,7 @@ }, "pricing": { "prompt": "0.00000088", - "completion": "0.00000088", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000088" }, "top_provider": { "context_length": 32768, @@ -6082,11 +6023,7 @@ }, "pricing": { "prompt": "0.00000018", - "completion": "0.00000059", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000059" }, "top_provider": { "context_length": 32767, @@ -6135,11 +6072,7 @@ }, "pricing": { "prompt": "0.00000057", - "completion": "0.00000142", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000142" }, "top_provider": { "context_length": 65536, @@ -6183,11 +6116,7 @@ }, "pricing": { "prompt": "0.000000051", - "completion": "0.00000034", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000034" }, "top_provider": { "context_length": 32768, @@ -6236,10 +6165,7 @@ "pricing": { "prompt": "0.0000002", "completion": "0.0000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000002" }, "top_provider": { @@ -6287,11 +6213,7 @@ }, "pricing": { "prompt": "0.00000011", - "completion": "0.00000038", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000038" }, "top_provider": { "context_length": 131072, @@ -6340,11 +6262,7 @@ }, "pricing": { "prompt": "0.000001", - "completion": "0.000003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000003" }, "top_provider": { "context_length": 131072, @@ -6388,11 +6306,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.00000075", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000075" }, "top_provider": { "context_length": 32768, @@ -6548,11 +6462,7 @@ }, "pricing": { "prompt": "0.00000007", - "completion": "0.00000028", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000028" }, "top_provider": { "context_length": 120000, @@ -6602,11 +6512,7 @@ }, "pricing": { "prompt": "0.00000014", - "completion": "0.00000056", - "request": "0", - "image": "0", - "web_search": 
"0", - "internal_reasoning": "0" + "completion": "0.00000056" }, "top_provider": { "context_length": 30000, @@ -6655,12 +6561,7 @@ "pricing": { "prompt": "0.0000006", "completion": "0.0000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.00000011", - "input_cache_write": "0" + "input_cache_read": "0.00000011" }, "top_provider": { "context_length": 65536, @@ -6713,11 +6614,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000004" }, "top_provider": { "context_length": 256000, @@ -6758,11 +6655,7 @@ }, "pricing": { "prompt": "0.000002", - "completion": "0.000008", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000008" }, "top_provider": { "context_length": 256000, @@ -6987,15 +6880,11 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 131072, "is_moderated": true }, "per_request_limits": null, @@ -7037,11 +6926,7 @@ }, "pricing": { "prompt": "0.000000039", - "completion": "0.00000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000019" }, "top_provider": { "context_length": 131072, @@ -7099,11 +6984,7 @@ }, "pricing": { "prompt": "0.000000039", - "completion": "0.00000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000019" }, "top_provider": { "context_length": 131072, @@ -7157,15 +7038,11 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 131072, "is_moderated": true }, "per_request_limits": null, @@ -7207,11 +7084,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 131072, @@ -7270,10 +7143,7 @@ "pricing": { "prompt": "0.000015", "completion": "0.000075", - "request": "0", - "image": "0.024", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.0000015", "input_cache_write": "0.00001875" }, @@ -7371,11 +7241,7 @@ }, "pricing": { "prompt": "0.00000007", - "completion": "0.00000027", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000027" }, "top_provider": { "context_length": 160000, @@ -7422,11 +7288,7 @@ }, "pricing": { "prompt": "0.00000008", - "completion": "0.00000033", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000033" }, "top_provider": { "context_length": 262144, @@ -7473,11 +7335,7 @@ }, "pricing": { "prompt": "0.00000035", - "completion": "0.00000155", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000155" }, "top_provider": { "context_length": 131072, @@ -7530,11 +7388,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + 
"completion": "0" }, "top_provider": { "context_length": 131072, @@ -7579,11 +7433,7 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000022" }, "top_provider": { "context_length": 131072, @@ -7636,11 +7486,7 @@ }, "pricing": { "prompt": "0.00000011", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 262144, @@ -7695,11 +7541,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 128000, @@ -7742,11 +7584,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 262000, @@ -7758,7 +7596,6 @@ "frequency_penalty", "max_tokens", "presence_penalty", - "seed", "stop", "temperature", "tool_choice", @@ -7790,11 +7627,7 @@ }, "pricing": { "prompt": "0.00000022", - "completion": "0.00000095", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000095" }, "top_provider": { "context_length": 262144, @@ -7847,10 +7680,7 @@ "pricing": { "prompt": "0.00000022", "completion": "0.0000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "input_cache_read": "0.000000022" }, "top_provider": { "context_length": 262144, @@ -7899,11 +7729,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 128000, @@ -8005,11 +7831,7 @@ }, "pricing": { "prompt": "0.000000071", - "completion": "0.000000463", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000000463" }, "top_provider": { "context_length": 262144, @@ -8084,48 +7906,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "moonshotai/kimi-k2:free", - "canonical_slug": "moonshotai/kimi-k2", - "hugging_face_id": "moonshotai/Kimi-K2-Instruct", - "name": "MoonshotAI: Kimi K2 0711 (free)", - "created": 1752263252, - "description": "Kimi K2 Instruct is a large-scale Mixture-of-Experts (MoE) language model developed by Moonshot AI, featuring 1 trillion total parameters with 32 billion active per forward pass. It is optimized for agentic capabilities, including advanced tool use, reasoning, and code synthesis. Kimi K2 excels across a broad range of benchmarks, particularly in coding (LiveCodeBench, SWE-bench), reasoning (ZebraLogic, GPQA), and tool-use (Tau2, AceBench) tasks. 
It supports long-context inference up to 128K tokens and is designed with a novel training stack that includes the MuonClip optimizer for stable large-scale MoE training.", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "seed", - "stop", - "temperature" - ], - "default_parameters": {}, - "expiration_date": null - }, { "id": "moonshotai/kimi-k2", "canonical_slug": "moonshotai/kimi-k2", @@ -8147,11 +7927,7 @@ }, "pricing": { "prompt": "0.0000005", - "completion": "0.0000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000024" }, "top_provider": { "context_length": 131072, @@ -8295,11 +8071,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 32768, @@ -8344,10 +8116,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000075" }, "top_provider": { @@ -8436,11 +8205,7 @@ }, "pricing": { "prompt": "0.00000014", - "completion": "0.00000057", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000057" }, "top_provider": { "context_length": 131072, @@ -8482,11 +8247,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 163840, @@ -8531,11 +8292,7 @@ }, "pricing": { "prompt": "0.00000025", - "completion": "0.00000085", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000085" }, "top_provider": { "context_length": 163840, @@ -8584,11 +8341,7 @@ }, "pricing": { "prompt": "0.0000009", - "completion": "0.0000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000019" }, "top_provider": { "context_length": 262144, @@ -8629,11 +8382,7 @@ }, "pricing": { "prompt": "0.0000008", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 81920, @@ -8675,11 +8424,7 @@ }, "pricing": { "prompt": "0.00000042", - "completion": "0.00000125", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000125" }, "top_provider": { "context_length": 123000, @@ -8724,11 +8469,7 @@ }, "pricing": { "prompt": "0.00000028", - "completion": "0.0000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000011" }, "top_provider": { "context_length": 123000, @@ -8823,11 +8564,7 @@ }, "pricing": { "prompt": "0.00000006", - "completion": "0.00000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000018" }, "top_provider": { 
"context_length": 131072, @@ -8878,11 +8615,7 @@ }, "pricing": { "prompt": "0.0000004", - "completion": "0.0000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000022" }, "top_provider": { "context_length": 1000000, @@ -9045,11 +8778,7 @@ }, "pricing": { "prompt": "0.00000029", - "completion": "0.00000115", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000115" }, "top_provider": { "context_length": 131072, @@ -9068,7 +8797,7 @@ "top_p" ], "default_parameters": {}, - "expiration_date": null + "expiration_date": "2026-02-08" }, { "id": "openai/o3-pro", @@ -9141,10 +8870,7 @@ "pricing": { "prompt": "0.0000003", "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.000000075" }, "top_provider": { @@ -9197,10 +8923,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000075" }, "top_provider": { @@ -9346,11 +9069,7 @@ }, "pricing": { "prompt": "0.0000004", - "completion": "0.00000175", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000175" }, "top_provider": { "context_length": 163840, @@ -9410,10 +9129,7 @@ "pricing": { "prompt": "0.000015", "completion": "0.000075", - "request": "0", - "image": "0.024", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.0000015", "input_cache_write": "0.00001875" }, @@ -9465,10 +9181,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.0000003", "input_cache_write": "0.00000375" }, @@ -9559,11 +9272,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.00000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000004" }, "top_provider": { "context_length": 32768, @@ -9607,11 +9316,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 32768, @@ -9767,11 +9472,7 @@ }, "pricing": { "prompt": "0.00000018", - "completion": "0.00000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000018" }, "top_provider": { "context_length": 131072, @@ -9815,11 +9516,7 @@ }, "pricing": { "prompt": "0.0000009", - "completion": "0.0000033", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000033" }, "top_provider": { "context_length": 131072, @@ -9863,11 +9560,7 @@ }, "pricing": { "prompt": "0.00000075", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 131072, @@ -9913,11 +9606,7 @@ }, "pricing": { "prompt": "0.0000005", - "completion": "0.0000008", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000008" }, "top_provider": { "context_length": 32768, @@ -10010,11 +9699,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - 
"image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 40960, @@ -10062,11 +9747,7 @@ }, "pricing": { "prompt": "0.00000018", - "completion": "0.00000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000018" }, "top_provider": { "context_length": 163840, @@ -10112,11 +9793,7 @@ }, "pricing": { "prompt": "0.00000006", - "completion": "0.00000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000022" }, "top_provider": { "context_length": 40960, @@ -10170,12 +9847,8 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000025", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0" + "completion": "0.0000004", + "input_cache_read": "0.00000005" }, "top_provider": { "context_length": 32000, @@ -10230,11 +9903,7 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000022" }, "top_provider": { "context_length": 40960, @@ -10284,11 +9953,7 @@ }, "pricing": { "prompt": "0.00000008", - "completion": "0.00000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000024" }, "top_provider": { "context_length": 40960, @@ -10340,11 +10005,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 40960, @@ -10397,11 +10058,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 163840, @@ -10446,11 +10103,7 @@ }, "pricing": { "prompt": "0.0000003", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 163840, @@ -10639,11 +10292,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000009", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000009" }, "top_provider": { "context_length": 32768, @@ -10913,10 +10562,7 @@ "pricing": { "prompt": "0.0000003", "completion": "0.0000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.000000075" }, "top_provider": { @@ -10968,10 +10614,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.005", "input_cache_read": "0.00000075" }, "top_provider": { @@ -11018,11 +10661,7 @@ }, "pricing": { "prompt": "0.0000006", - "completion": "0.0000018", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000018" }, "top_provider": { "context_length": 131072, @@ -11068,11 +10707,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000006", - "request": "0", - "image": "0.0006684", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 1048576, @@ -11122,11 +10757,7 @@ }, "pricing": { "prompt": "0.00000008", - "completion": "0.0000003", - "request": 
"0", - "image": "0.0003342", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000003" }, "top_provider": { "context_length": 327680, @@ -11176,11 +10807,7 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000022", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000022" }, "top_provider": { "context_length": 16384, @@ -11229,11 +10856,7 @@ }, "pricing": { "prompt": "0.00000019", - "completion": "0.00000087", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000087" }, "top_provider": { "context_length": 163840, @@ -11328,11 +10951,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 128000, @@ -11380,11 +10999,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000011" }, "top_provider": { "context_length": 131072, @@ -11508,11 +11123,7 @@ }, "pricing": { "prompt": "0.00000001703012", - "completion": "0.0000000681536", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000000681536" }, "top_provider": { "context_length": 96000, @@ -11598,11 +11209,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 131072, @@ -11649,11 +11256,7 @@ }, "pricing": { "prompt": "0.0000025", - "completion": "0.00001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00001" }, "top_provider": { "context_length": 256000, @@ -11780,15 +11383,12 @@ }, "top_provider": { "context_length": 131072, - "max_completion_tokens": null, + "max_completion_tokens": 8192, "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ - "frequency_penalty", "max_tokens", - "presence_penalty", - "repetition_penalty", "response_format", "seed", "stop", @@ -11826,11 +11426,7 @@ }, "pricing": { "prompt": "0.00000004", - "completion": "0.00000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000015" }, "top_provider": { "context_length": 96000, @@ -11883,11 +11479,7 @@ }, "pricing": { "prompt": "0.00000055", - "completion": "0.0000008", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000008" }, "top_provider": { "context_length": 32768, @@ -12076,7 +11668,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -12180,10 +11772,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.0000003", "input_cache_write": "0.00000375" }, @@ -12234,10 +11823,7 @@ "pricing": { "prompt": "0.000003", "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.0000003", "input_cache_write": "0.00000375" }, @@ -12333,11 +11919,7 @@ }, "pricing": { "prompt": "0.00000002", - 
"completion": "0.00000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000006" }, "top_provider": { "context_length": 131072, @@ -12740,11 +12322,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 32768, @@ -12768,7 +12346,7 @@ "top_p" ], "default_parameters": {}, - "expiration_date": null + "expiration_date": "2026-02-16" }, { "id": "qwen/qwen-plus", @@ -12927,11 +12505,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000011" }, "top_provider": { "context_length": 32768, @@ -12988,7 +12562,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -13077,11 +12651,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000011" }, "top_provider": { "context_length": 131072, @@ -13103,8 +12673,6 @@ "stop", "structured_outputs", "temperature", - "tool_choice", - "tools", "top_k", "top_p" ], @@ -13132,11 +12700,7 @@ }, "pricing": { "prompt": "0.0000007", - "completion": "0.0000025", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000025" }, "top_provider": { "context_length": 64000, @@ -13184,11 +12748,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000011" }, "top_provider": { "context_length": 1000192, @@ -13229,7 +12789,7 @@ }, "top_provider": { "context_length": 16384, - "max_completion_tokens": null, + "max_completion_tokens": 16384, "is_moderated": false }, "per_request_limits": null, @@ -13271,11 +12831,7 @@ }, "pricing": { "prompt": "0.000003", - "completion": "0.000003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000003" }, "top_provider": { "context_length": 16000, @@ -13320,11 +12876,7 @@ }, "pricing": { "prompt": "0.0000003", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 163840, @@ -13461,11 +13013,7 @@ }, "pricing": { "prompt": "0.0000000375", - "completion": "0.00000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000015" }, "top_provider": { "context_length": 128000, @@ -13488,49 +13036,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "google/gemini-2.0-flash-exp:free", - "canonical_slug": "google/gemini-2.0-flash-exp", - "hugging_face_id": "", - "name": "Google: Gemini 2.0 Flash Experimental (free)", - "created": 1733937523, - "description": "Gemini Flash 2.0 offers a significantly faster time to first token (TTFT) compared to [Gemini Flash 1.5](/google/gemini-flash-1.5), while maintaining quality on par with larger models like [Gemini Pro 1.5](/google/gemini-pro-1.5). It introduces notable enhancements in multimodal understanding, coding capabilities, complex instruction following, and function calling. 
These advancements come together to deliver more seamless and robust agentic experiences.", - "context_length": 1048576, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Gemini", - "instruct_type": null - }, - "pricing": { - "prompt": "0", - "completion": "0" - }, - "top_provider": { - "context_length": 1048576, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "response_format", - "seed", - "stop", - "temperature", - "tool_choice", - "tools", - "top_p" - ], - "default_parameters": {}, - "expiration_date": "2026-03-03" - }, { "id": "meta-llama/llama-3.3-70b-instruct:free", "canonical_slug": "meta-llama/llama-3.3-70b-instruct", @@ -13538,7 +13043,7 @@ "name": "Meta: Llama 3.3 70B Instruct (free)", "created": 1733506137, "description": "The Meta Llama 3.3 multilingual large language model (LLM) is a pretrained and instruction tuned generative model in 70B (text in/text out). The Llama 3.3 instruction tuned text only model is optimized for multilingual dialogue use cases and outperforms many of the available open source and closed chat models on common industry benchmarks.\n\nSupported languages: English, German, French, Italian, Portuguese, Hindi, Spanish, and Thai.\n\n[Model Card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/MODEL_CARD.md)", - "context_length": 131072, + "context_length": 128000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -13555,16 +13060,15 @@ "completion": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false + "context_length": 128000, + "max_completion_tokens": 128000, + "is_moderated": true }, "per_request_limits": null, "supported_parameters": [ "frequency_penalty", "max_tokens", "presence_penalty", - "repetition_penalty", "seed", "stop", "temperature", @@ -13597,11 +13101,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.00000032", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000032" }, "top_provider": { "context_length": 131072, @@ -13653,11 +13153,7 @@ }, "pricing": { "prompt": "0.00000006", - "completion": "0.00000024", - "request": "0", - "image": "0.00009", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000024" }, "top_provider": { "context_length": 300000, @@ -13697,11 +13193,7 @@ }, "pricing": { "prompt": "0.000000035", - "completion": "0.00000014", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000014" }, "top_provider": { "context_length": 128000, @@ -13742,11 +13234,7 @@ }, "pricing": { "prompt": "0.0000008", - "completion": "0.0000032", - "request": "0", - "image": "0.0012", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000032" }, "top_provider": { "context_length": 300000, @@ -13980,11 +13468,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000011", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000011" }, "top_provider": { "context_length": 32768, @@ -14031,11 +13515,7 @@ }, "pricing": { "prompt": "0.0000045", - "completion": "0.0000045", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000045" }, "top_provider": { "context_length": 16000, @@ -14084,7 +13564,7 
@@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -14126,10 +13606,7 @@ "pricing": { "prompt": "0.0000008", "completion": "0.000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", + "web_search": "0.01", "input_cache_read": "0.00000008", "input_cache_write": "0.000001" }, @@ -14176,11 +13653,7 @@ }, "pricing": { "prompt": "0.000003", - "completion": "0.000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000005" }, "top_provider": { "context_length": 16384, @@ -14231,11 +13704,7 @@ }, "pricing": { "prompt": "0.000006", - "completion": "0.00003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00003" }, "top_provider": { "context_length": 200000, @@ -14255,53 +13724,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "mistralai/ministral-3b", - "canonical_slug": "mistralai/ministral-3b", - "hugging_face_id": null, - "name": "Mistral: Ministral 3B", - "created": 1729123200, - "description": "Ministral 3B is a 3B parameter model optimized for on-device and edge computing. It excels in knowledge, commonsense reasoning, and function-calling, outperforming larger models like Mistral 7B on most benchmarks. Supporting up to 128k context length, it’s ideal for orchestrating agentic workflows and specialist tasks with efficient inference.", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000004", - "completion": "0.00000004" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_p" - ], - "default_parameters": { - "temperature": 0.3 - }, - "expiration_date": null - }, { "id": "mistralai/ministral-8b", "canonical_slug": "mistralai/ministral-8b", @@ -14349,6 +13771,53 @@ }, "expiration_date": null }, + { + "id": "mistralai/ministral-3b", + "canonical_slug": "mistralai/ministral-3b", + "hugging_face_id": null, + "name": "Mistral: Ministral 3B", + "created": 1729123200, + "description": "Ministral 3B is a 3B parameter model optimized for on-device and edge computing. It excels in knowledge, commonsense reasoning, and function-calling, outperforming larger models like Mistral 7B on most benchmarks. 
Supporting up to 128k context length, it’s ideal for orchestrating agentic workflows and specialist tasks with efficient inference.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000004", + "completion": "0.00000004" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_p" + ], + "default_parameters": { + "temperature": 0.3 + }, + "expiration_date": null + }, { "id": "qwen/qwen-2.5-7b-instruct", "canonical_slug": "qwen/qwen-2.5-7b-instruct", @@ -14370,11 +13839,7 @@ }, "pricing": { "prompt": "0.00000004", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 32768, @@ -14425,11 +13890,7 @@ }, "pricing": { "prompt": "0.0000012", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000012" }, "top_provider": { "context_length": 131072, @@ -14476,11 +13937,7 @@ }, "pricing": { "prompt": "0.0000025", - "completion": "0.00001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00001" }, "top_provider": { "context_length": 8000, @@ -14518,11 +13975,7 @@ }, "pricing": { "prompt": "0.0000025", - "completion": "0.00001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00001" }, "top_provider": { "context_length": 8000, @@ -14564,7 +14017,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -14588,52 +14041,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "meta-llama/llama-3.2-1b-instruct", - "canonical_slug": "meta-llama/llama-3.2-1b-instruct", - "hugging_face_id": "meta-llama/Llama-3.2-1B-Instruct", - "name": "Meta: Llama 3.2 1B Instruct", - "created": 1727222400, - "description": "Llama 3.2 1B is a 1-billion-parameter language model focused on efficiently performing natural language tasks, such as summarization, dialogue, and multilingual text analysis. 
Its smaller size allows it to operate efficiently in low-resource environments while maintaining strong task performance.\n\nSupporting eight core languages and fine-tunable for more, Llama 1.3B is ideal for businesses or developers seeking lightweight yet powerful AI solutions that can operate in diverse multilingual settings without the high computational demand of larger models.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 60000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.000000027", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 60000, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "repetition_penalty", - "seed", - "temperature", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": null - }, { "id": "meta-llama/llama-3.2-3b-instruct:free", "canonical_slug": "meta-llama/llama-3.2-3b-instruct", @@ -14655,11 +14062,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 131072, @@ -14700,11 +14103,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.00000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000002" }, "top_provider": { "context_length": 131072, @@ -14729,6 +14128,48 @@ "default_parameters": {}, "expiration_date": null }, + { + "id": "meta-llama/llama-3.2-1b-instruct", + "canonical_slug": "meta-llama/llama-3.2-1b-instruct", + "hugging_face_id": "meta-llama/Llama-3.2-1B-Instruct", + "name": "Meta: Llama 3.2 1B Instruct", + "created": 1727222400, + "description": "Llama 3.2 1B is a 1-billion-parameter language model focused on efficiently performing natural language tasks, such as summarization, dialogue, and multilingual text analysis. 
Its smaller size allows it to operate efficiently in low-resource environments while maintaining strong task performance.\n\nSupporting eight core languages and fine-tunable for more, Llama 1.3B is ideal for businesses or developers seeking lightweight yet powerful AI solutions that can operate in diverse multilingual settings without the high computational demand of larger models.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", + "context_length": 60000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.000000027", + "completion": "0.0000002" + }, + "top_provider": { + "context_length": 60000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "max_tokens", + "presence_penalty", + "repetition_penalty", + "seed", + "temperature", + "top_k", + "top_p" + ], + "default_parameters": {}, + "expiration_date": null + }, { "id": "meta-llama/llama-3.2-11b-vision-instruct", "canonical_slug": "meta-llama/llama-3.2-11b-vision-instruct", @@ -14751,11 +14192,7 @@ }, "pricing": { "prompt": "0.000000049", - "completion": "0.000000049", - "request": "0", - "image": "0.00007948", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000000049" }, "top_provider": { "context_length": 131072, @@ -14801,11 +14238,7 @@ }, "pricing": { "prompt": "0.00000012", - "completion": "0.00000039", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000039" }, "top_provider": { "context_length": 32768, @@ -14858,7 +14291,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null, @@ -14897,11 +14330,7 @@ }, "pricing": { "prompt": "0.0000001", - "completion": "0.0000001", - "request": "0", - "image": "0.0001445", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000001" }, "top_provider": { "context_length": 32768, @@ -14931,56 +14360,6 @@ }, "expiration_date": null }, - { - "id": "cohere/command-r-plus-08-2024", - "canonical_slug": "cohere/command-r-plus-08-2024", - "hugging_face_id": null, - "name": "Cohere: Command R+ (08-2024)", - "created": 1724976000, - "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", - "context_length": 128000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Cohere", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000025", - "completion": "0.00001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 
4000, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": null - }, { "id": "cohere/command-r-08-2024", "canonical_slug": "cohere/command-r-08-2024", @@ -15002,11 +14381,7 @@ }, "pricing": { "prompt": "0.00000015", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000006" }, "top_provider": { "context_length": 128000, @@ -15032,89 +14407,45 @@ "expiration_date": null }, { - "id": "qwen/qwen-2.5-vl-7b-instruct:free", - "canonical_slug": "qwen/qwen-2-vl-7b-instruct", - "hugging_face_id": "Qwen/Qwen2.5-VL-7B-Instruct", - "name": "Qwen: Qwen2.5-VL 7B Instruct (free)", - "created": 1724803200, - "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", - "context_length": 32768, + "id": "cohere/command-r-plus-08-2024", + "canonical_slug": "cohere/command-r-plus-08-2024", + "hugging_face_id": null, + "name": "Cohere: Command R+ (08-2024)", + "created": 1724976000, + "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "context_length": 128000, "architecture": { - "modality": "text+image->text", + "modality": "text->text", "input_modalities": [ - "text", - "image" + "text" ], "output_modalities": [ "text" ], - "tokenizer": "Qwen", + "tokenizer": "Cohere", "instruct_type": null }, "pricing": { - "prompt": "0", - "completion": "0" + "prompt": "0.0000025", + "completion": "0.00001" }, "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false + "context_length": 128000, + "max_completion_tokens": 4000, + "is_moderated": true }, "per_request_limits": null, 
"supported_parameters": [ "frequency_penalty", "max_tokens", "presence_penalty", - "repetition_penalty", - "temperature" - ], - "default_parameters": {}, - "expiration_date": null - }, - { - "id": "qwen/qwen-2.5-vl-7b-instruct", - "canonical_slug": "qwen/qwen-2-vl-7b-instruct", - "hugging_face_id": "Qwen/Qwen2.5-VL-7B-Instruct", - "name": "Qwen: Qwen2.5-VL 7B Instruct", - "created": 1724803200, - "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", - "context_length": 32768, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Qwen", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0.0001445", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", + "response_format", "seed", "stop", + "structured_outputs", "temperature", + "tool_choice", + "tools", "top_k", "top_p" ], @@ -15146,7 +14477,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 32768, "is_moderated": false }, "per_request_limits": null, @@ -15169,6 +14500,52 @@ "default_parameters": {}, "expiration_date": null }, + { + "id": "qwen/qwen-2.5-vl-7b-instruct", + "canonical_slug": "qwen/qwen-2-vl-7b-instruct", + "hugging_face_id": "Qwen/Qwen2.5-VL-7B-Instruct", + "name": "Qwen: Qwen2.5-VL 7B Instruct", + "created": 1724803200, + "description": "Qwen2.5 VL 7B is a multimodal LLM from the Qwen Team with the following key enhancements:\n\n- SoTA understanding of images of various resolution & ratio: Qwen2.5-VL achieves state-of-the-art performance on visual understanding benchmarks, including MathVista, DocVQA, RealWorldQA, MTVQA, etc.\n\n- Understanding videos of 20min+: Qwen2.5-VL can understand videos over 20 minutes for high-quality video-based question answering, dialog, content creation, etc.\n\n- Agent that can operate your mobiles, robots, etc.: with the abilities of complex reasoning and decision making, Qwen2.5-VL can be 
integrated with devices like mobile phones, robots, etc., for automatic operation based on visual environment and text instructions.\n\n- Multilingual Support: to serve global users, besides English and Chinese, Qwen2.5-VL now supports the understanding of texts in different languages inside images, including most European languages, Japanese, Korean, Arabic, Vietnamese, etc.\n\nFor more details, see this [blog post](https://qwenlm.github.io/blog/qwen2-vl/) and [GitHub repo](https://github.com/QwenLM/Qwen2-VL).\n\nUsage of this model is subject to [Tongyi Qianwen LICENSE AGREEMENT](https://huggingface.co/Qwen/Qwen1.5-110B-Chat/blob/main/LICENSE).", + "context_length": 32768, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000002", + "completion": "0.0000002" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "seed", + "stop", + "temperature", + "top_k", + "top_p" + ], + "default_parameters": {}, + "expiration_date": null + }, { "id": "nousresearch/hermes-3-llama-3.1-70b", "canonical_slug": "nousresearch/hermes-3-llama-3.1-70b", @@ -15194,7 +14571,7 @@ }, "top_provider": { "context_length": 65536, - "max_completion_tokens": null, + "max_completion_tokens": 65536, "is_moderated": false }, "per_request_limits": null, @@ -15236,11 +14613,7 @@ }, "pricing": { "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0" }, "top_provider": { "context_length": 131072, @@ -15281,11 +14654,7 @@ }, "pricing": { "prompt": "0.000001", - "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000001" }, "top_provider": { "context_length": 131072, @@ -15377,11 +14746,7 @@ }, "pricing": { "prompt": "0.00000004", - "completion": "0.00000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000005" }, "top_provider": { "context_length": 8192, @@ -15430,10 +14795,6 @@ "pricing": { "prompt": "0.0000025", "completion": "0.00001", - "request": "0", - "image": "0.003613", - "web_search": "0", - "internal_reasoning": "0", "input_cache_read": "0.00000125" }, "top_provider": { @@ -15483,11 +14844,7 @@ }, "pricing": { "prompt": "0.000004", - "completion": "0.000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000004" }, "top_provider": { "context_length": 32768, @@ -15511,150 +14868,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "meta-llama/llama-3.1-70b-instruct", - "canonical_slug": "meta-llama/llama-3.1-70b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct", - "name": "Meta: Llama 3.1 70B Instruct", - "created": 1721692800, - "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.0000004", - "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": null - }, - { - "id": "meta-llama/llama-3.1-405b-instruct:free", - "canonical_slug": "meta-llama/llama-3.1-405b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-405B-Instruct", - "name": "Meta: Llama 3.1 405B Instruct (free)", - "created": 1721692800, - "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0", - "completion": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "repetition_penalty", - "temperature" - ], - "default_parameters": {}, - "expiration_date": null - }, - { - "id": "meta-llama/llama-3.1-405b-instruct", - "canonical_slug": "meta-llama/llama-3.1-405b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-405B-Instruct", - "name": "Meta: Llama 3.1 405B Instruct", - "created": 1721692800, - "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 10000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.0000035", - "completion": "0.0000035", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 10000, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": "2026-02-06" - }, { "id": "meta-llama/llama-3.1-8b-instruct", "canonical_slug": "meta-llama/llama-3.1-8b-instruct", @@ -15676,11 +14889,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.00000005", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000005" }, "top_provider": { "context_length": 16384, @@ -15710,6 +14919,103 @@ "default_parameters": {}, "expiration_date": null }, + { + "id": "meta-llama/llama-3.1-405b-instruct", + "canonical_slug": "meta-llama/llama-3.1-405b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3.1-405B-Instruct", + "name": "Meta: Llama 3.1 405B Instruct", + "created": 1721692800, + "description": "The highly anticipated 400B class of Llama3 is here! Clocking in at 128k context with impressive eval scores, the Meta AI team continues to push the frontier of open-source LLMs.\n\nMeta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 405B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models including GPT-4o and Claude 3.5 Sonnet in evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 10000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.0000035", + "completion": "0.0000035" + }, + "top_provider": { + "context_length": 10000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ], + "default_parameters": {}, + "expiration_date": "2026-02-06" + }, + { + "id": "meta-llama/llama-3.1-70b-instruct", + "canonical_slug": "meta-llama/llama-3.1-70b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct", + "name": "Meta: Llama 3.1 70B Instruct", + "created": 1721692800, + "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. 
This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.0000004", + "completion": "0.0000004" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ], + "default_parameters": {}, + "expiration_date": null + }, { "id": "mistralai/mistral-nemo", "canonical_slug": "mistralai/mistral-nemo", @@ -15731,11 +15037,7 @@ }, "pricing": { "prompt": "0.00000002", - "completion": "0.00000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000004" }, "top_provider": { "context_length": 131072, @@ -15765,10 +15067,10 @@ "expiration_date": null }, { - "id": "openai/gpt-4o-mini", - "canonical_slug": "openai/gpt-4o-mini", + "id": "openai/gpt-4o-mini-2024-07-18", + "canonical_slug": "openai/gpt-4o-mini-2024-07-18", "hugging_face_id": null, - "name": "OpenAI: GPT-4o-mini", + "name": "OpenAI: GPT-4o-mini (2024-07-18)", "created": 1721260800, "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal", "context_length": 128000, @@ -15817,10 +15119,10 @@ "expiration_date": null }, { - "id": "openai/gpt-4o-mini-2024-07-18", - "canonical_slug": "openai/gpt-4o-mini-2024-07-18", + "id": "openai/gpt-4o-mini", + "canonical_slug": "openai/gpt-4o-mini", "hugging_face_id": null, - "name": "OpenAI: GPT-4o-mini (2024-07-18)", + "name": "OpenAI: GPT-4o-mini", "created": 1721260800, "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). 
It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal", "context_length": 128000, @@ -15893,7 +15195,7 @@ }, "top_provider": { "context_length": 8192, - "max_completion_tokens": null, + "max_completion_tokens": 2048, "is_moderated": false }, "per_request_limits": null, @@ -15931,11 +15233,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000009", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000009" }, "top_provider": { "context_length": 8192, @@ -15976,11 +15274,7 @@ }, "pricing": { "prompt": "0.00000148", - "completion": "0.00000148", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000148" }, "top_provider": { "context_length": 8192, @@ -16004,6 +15298,51 @@ "default_parameters": {}, "expiration_date": null }, + { + "id": "nousresearch/hermes-2-pro-llama-3-8b", + "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b", + "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B", + "name": "NousResearch: Hermes 2 Pro - Llama-3 8B", + "created": 1716768000, + "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.", + "context_length": 8192, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.00000014", + "completion": "0.00000014" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": 8192, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "max_tokens", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "top_k", + "top_p" + ], + "default_parameters": {}, + "expiration_date": null + }, { "id": "mistralai/mistral-7b-instruct", "canonical_slug": "mistralai/mistral-7b-instruct", @@ -16025,11 +15364,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 32768, @@ -16054,55 +15389,6 @@ }, "expiration_date": null }, - { - "id": "nousresearch/hermes-2-pro-llama-3-8b", - "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b", - "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B", - "name": "NousResearch: Hermes 2 Pro - Llama-3 8B", - "created": 1716768000, - "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.", - "context_length": 8192, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.00000014", - "completion": 
"0.00000014", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": null - }, { "id": "mistralai/mistral-7b-instruct-v0.3", "canonical_slug": "mistralai/mistral-7b-instruct-v0.3", @@ -16124,11 +15410,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 32768, @@ -16174,11 +15456,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 8192, @@ -16201,6 +15479,57 @@ "default_parameters": {}, "expiration_date": null }, + { + "id": "openai/gpt-4o-2024-05-13", + "canonical_slug": "openai/gpt-4o-2024-05-13", + "hugging_face_id": null, + "name": "OpenAI: GPT-4o (2024-05-13)", + "created": 1715558400, + "description": "GPT-4o (\"o\" for \"omni\") is OpenAI's latest AI model, supporting both text and image inputs with text outputs. It maintains the intelligence level of [GPT-4 Turbo](/models/openai/gpt-4-turbo) while being twice as fast and 50% more cost-effective. GPT-4o also offers improved performance in processing non-English languages and enhanced visual capabilities.\n\nFor benchmarking against other models, it was briefly called [\"im-also-a-good-gpt2-chatbot\"](https://twitter.com/LiamFedus/status/1790064963966370209)\n\n#multimodal", + "context_length": 128000, + "architecture": { + "modality": "text+image+file->text", + "input_modalities": [ + "text", + "image", + "file" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000005", + "completion": "0.000015" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_logprobs", + "top_p", + "web_search_options" + ], + "default_parameters": {}, + "expiration_date": null + }, { "id": "openai/gpt-4o", "canonical_slug": "openai/gpt-4o", @@ -16305,52 +15634,46 @@ "expiration_date": null }, { - "id": "openai/gpt-4o-2024-05-13", - "canonical_slug": "openai/gpt-4o-2024-05-13", - "hugging_face_id": null, - "name": "OpenAI: GPT-4o (2024-05-13)", - "created": 1715558400, - "description": "GPT-4o (\"o\" for \"omni\") is OpenAI's latest AI model, supporting both text and image inputs with text outputs. It maintains the intelligence level of [GPT-4 Turbo](/models/openai/gpt-4-turbo) while being twice as fast and 50% more cost-effective. 
GPT-4o also offers improved performance in processing non-English languages and enhanced visual capabilities.\n\nFor benchmarking against other models, it was briefly called [\"im-also-a-good-gpt2-chatbot\"](https://twitter.com/LiamFedus/status/1790064963966370209)\n\n#multimodal", - "context_length": 128000, + "id": "meta-llama/llama-3-70b-instruct", + "canonical_slug": "meta-llama/llama-3-70b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3-70B-Instruct", + "name": "Meta: Llama 3 70B Instruct", + "created": 1713398400, + "description": "Meta's latest class of model (Llama 3) launched with a variety of sizes & flavors. This 70B instruct-tuned version was optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 8192, "architecture": { - "modality": "text+image+file->text", + "modality": "text->text", "input_modalities": [ - "text", - "image", - "file" + "text" ], "output_modalities": [ "text" ], - "tokenizer": "GPT", - "instruct_type": null + "tokenizer": "Llama3", + "instruct_type": "llama3" }, "pricing": { - "prompt": "0.000005", - "completion": "0.000015" + "prompt": "0.00000051", + "completion": "0.00000074" }, "top_provider": { - "context_length": 128000, - "max_completion_tokens": 4096, - "is_moderated": true + "context_length": 8192, + "max_completion_tokens": 8000, + "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ "frequency_penalty", - "logit_bias", - "logprobs", "max_tokens", "presence_penalty", + "repetition_penalty", "response_format", "seed", "stop", "structured_outputs", "temperature", - "tool_choice", - "tools", - "top_logprobs", - "top_p", - "web_search_options" + "top_k", + "top_p" ], "default_parameters": {}, "expiration_date": null @@ -16376,11 +15699,7 @@ }, "pricing": { "prompt": "0.00000003", - "completion": "0.00000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000004" }, "top_provider": { "context_length": 8192, @@ -16407,57 +15726,6 @@ "default_parameters": {}, "expiration_date": null }, - { - "id": "meta-llama/llama-3-70b-instruct", - "canonical_slug": "meta-llama/llama-3-70b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3-70B-Instruct", - "name": "Meta: Llama 3 70B Instruct", - "created": 1713398400, - "description": "Meta's latest class of model (Llama 3) launched with a variety of sizes & flavors. This 70B instruct-tuned version was optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 8192, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.0000004", - "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "top_k", - "top_p" - ], - "default_parameters": {}, - "expiration_date": null - }, { "id": "mistralai/mixtral-8x22b-instruct", "canonical_slug": "mistralai/mixtral-8x22b-instruct", @@ -16526,11 +15794,7 @@ }, "pricing": { "prompt": "0.00000048", - "completion": "0.00000048", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000048" }, "top_provider": { "context_length": 65536, @@ -16715,11 +15979,7 @@ }, "pricing": { "prompt": "0.000001", - "completion": "0.000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000002" }, "top_provider": { "context_length": 4095, @@ -16862,11 +16122,7 @@ }, "pricing": { "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000002" }, "top_provider": { "context_length": 32768, @@ -16912,11 +16168,7 @@ }, "pricing": { "prompt": "0.00000054", - "completion": "0.00000054", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000054" }, "top_provider": { "context_length": 32768, @@ -16970,26 +16222,18 @@ }, "top_provider": { "context_length": 4096, - "max_completion_tokens": null, + "max_completion_tokens": 2048, "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ "frequency_penalty", - "logit_bias", - "logprobs", "max_tokens", - "min_p", "presence_penalty", - "repetition_penalty", "response_format", - "seed", "stop", "structured_outputs", "temperature", - "top_a", - "top_k", - "top_logprobs", "top_p" ], "default_parameters": {}, @@ -17016,11 +16260,7 @@ }, "pricing": { "prompt": "0.00000375", - "completion": "0.0000075", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.0000075" }, "top_provider": { "context_length": 6144, @@ -17054,15 +16294,20 @@ "hugging_face_id": null, "name": "Auto Router", "created": 1699401600, - "description": "Your prompt will be processed by a meta-model and routed to one of dozens of models (see below), optimizing for the best possible output.\n\nTo see which model was used, visit [Activity](/activity), or read the `model` attribute of the response. 
Your response will be priced at the same rate as the routed model.\n\nLearn more, including how to customize the models for routing, in our [docs](/docs/guides/routing/routers/auto-router).\n\nRequests will be routed to the following models:\n- [openai/gpt-5.1](/openai/gpt-5.1)\n- [openai/gpt-5](/openai/gpt-5)\n- [openai/gpt-5-mini](/openai/gpt-5-mini)\n- [openai/gpt-5-nano](/openai/gpt-5-nano)\n- [openai/gpt-4.1](/openai/gpt-4.1)\n- [openai/gpt-4.1-mini](/openai/gpt-4.1-mini)\n- [openai/gpt-4.1-nano](/openai/gpt-4.1-nano)\n- [openai/gpt-4o](/openai/gpt-4o)\n- [openai/gpt-4o-2024-05-13](/openai/gpt-4o-2024-05-13)\n- [openai/gpt-4o-2024-08-06](/openai/gpt-4o-2024-08-06)\n- [openai/gpt-4o-2024-11-20](/openai/gpt-4o-2024-11-20)\n- [openai/gpt-4o-mini](/openai/gpt-4o-mini)\n- [openai/gpt-4o-mini-2024-07-18](/openai/gpt-4o-mini-2024-07-18)\n- [openai/gpt-4-turbo](/openai/gpt-4-turbo)\n- [openai/gpt-4-turbo-preview](/openai/gpt-4-turbo-preview)\n- [openai/gpt-4-1106-preview](/openai/gpt-4-1106-preview)\n- [openai/gpt-4](/openai/gpt-4)\n- [openai/gpt-3.5-turbo](/openai/gpt-3.5-turbo)\n- [openai/gpt-oss-120b](/openai/gpt-oss-120b)\n- [anthropic/claude-opus-4.5](/anthropic/claude-opus-4.5)\n- [anthropic/claude-opus-4.1](/anthropic/claude-opus-4.1)\n- [anthropic/claude-opus-4](/anthropic/claude-opus-4)\n- [anthropic/claude-sonnet-4.5](/anthropic/claude-sonnet-4.5)\n- [anthropic/claude-sonnet-4](/anthropic/claude-sonnet-4)\n- [anthropic/claude-3.7-sonnet](/anthropic/claude-3.7-sonnet)\n- [anthropic/claude-haiku-4.5](/anthropic/claude-haiku-4.5)\n- [anthropic/claude-3.5-haiku](/anthropic/claude-3.5-haiku)\n- [anthropic/claude-3-haiku](/anthropic/claude-3-haiku)\n- [google/gemini-3-pro-preview](/google/gemini-3-pro-preview)\n- [google/gemini-2.5-pro](/google/gemini-2.5-pro)\n- [google/gemini-2.0-flash-001](/google/gemini-2.0-flash-001)\n- [google/gemini-2.5-flash](/google/gemini-2.5-flash)\n- [mistralai/mistral-large](/mistralai/mistral-large)\n- [mistralai/mistral-large-2407](/mistralai/mistral-large-2407)\n- [mistralai/mistral-large-2411](/mistralai/mistral-large-2411)\n- [mistralai/mistral-medium-3.1](/mistralai/mistral-medium-3.1)\n- [mistralai/mistral-nemo](/mistralai/mistral-nemo)\n- [mistralai/mistral-7b-instruct](/mistralai/mistral-7b-instruct)\n- [mistralai/mixtral-8x7b-instruct](/mistralai/mixtral-8x7b-instruct)\n- [mistralai/mixtral-8x22b-instruct](/mistralai/mixtral-8x22b-instruct)\n- [mistralai/codestral-2508](/mistralai/codestral-2508)\n- [x-ai/grok-4](/x-ai/grok-4)\n- [x-ai/grok-3](/x-ai/grok-3)\n- [x-ai/grok-3-mini](/x-ai/grok-3-mini)\n- [deepseek/deepseek-r1](/deepseek/deepseek-r1)\n- [meta-llama/llama-3.3-70b-instruct](/meta-llama/llama-3.3-70b-instruct)\n- [meta-llama/llama-3.1-405b-instruct](/meta-llama/llama-3.1-405b-instruct)\n- [meta-llama/llama-3.1-70b-instruct](/meta-llama/llama-3.1-70b-instruct)\n- [meta-llama/llama-3.1-8b-instruct](/meta-llama/llama-3.1-8b-instruct)\n- [meta-llama/llama-3-70b-instruct](/meta-llama/llama-3-70b-instruct)\n- [meta-llama/llama-3-8b-instruct](/meta-llama/llama-3-8b-instruct)\n- [qwen/qwen3-235b-a22b](/qwen/qwen3-235b-a22b)\n- [qwen/qwen3-32b](/qwen/qwen3-32b)\n- [qwen/qwen3-14b](/qwen/qwen3-14b)\n- [cohere/command-r-plus-08-2024](/cohere/command-r-plus-08-2024)\n- [cohere/command-r-08-2024](/cohere/command-r-08-2024)\n- [moonshotai/kimi-k2-thinking](/moonshotai/kimi-k2-thinking)\n- [perplexity/sonar](/perplexity/sonar)", + "description": "Your prompt will be processed by a meta-model and routed to one of dozens of models (see below), optimizing 
for the best possible output.\n\nTo see which model was used, visit [Activity](/activity), or read the `model` attribute of the response. Your response will be priced at the same rate as the routed model.\n\nLearn more, including how to customize the models for routing, in our [docs](/docs/guides/routing/routers/auto-router).\n\nRequests will be routed to the following models:\n- [openai/gpt-5.2](/openai/gpt-5.2)\n- [openai/gpt-5.2-pro](/openai/gpt-5.2-pro)\n- [openai/gpt-5.1](/openai/gpt-5.1)\n- [openai/gpt-5](/openai/gpt-5)\n- [openai/gpt-5-mini](/openai/gpt-5-mini)\n- [openai/gpt-5-nano](/openai/gpt-5-nano)\n- [openai/gpt-4.1](/openai/gpt-4.1)\n- [openai/gpt-4.1-mini](/openai/gpt-4.1-mini)\n- [openai/gpt-4.1-nano](/openai/gpt-4.1-nano)\n- [openai/gpt-oss-120b](/openai/gpt-oss-120b)\n- [anthropic/claude-opus-4.5](/anthropic/claude-opus-4.5)\n- [anthropic/claude-sonnet-4.5](/anthropic/claude-sonnet-4.5)\n- [anthropic/claude-haiku-4.5](/anthropic/claude-haiku-4.5)\n- [google/gemini-3-pro-preview](/google/gemini-3-pro-preview)\n- [google/gemini-2.5-pro](/google/gemini-2.5-pro)\n- [google/gemini-2.5-flash](/google/gemini-2.5-flash)\n- [mistralai/mistral-large](/mistralai/mistral-large)\n- [mistralai/mistral-large-2407](/mistralai/mistral-large-2407)\n- [mistralai/mistral-large-2411](/mistralai/mistral-large-2411)\n- [mistralai/mistral-medium-3.1](/mistralai/mistral-medium-3.1)\n- [mistralai/mistral-nemo](/mistralai/mistral-nemo)\n- [mistralai/mistral-7b-instruct](/mistralai/mistral-7b-instruct)\n- [mistralai/mixtral-8x7b-instruct](/mistralai/mixtral-8x7b-instruct)\n- [mistralai/mixtral-8x22b-instruct](/mistralai/mixtral-8x22b-instruct)\n- [mistralai/codestral-2508](/mistralai/codestral-2508)\n- [x-ai/grok-4](/x-ai/grok-4)\n- [x-ai/grok-3](/x-ai/grok-3)\n- [x-ai/grok-3-mini](/x-ai/grok-3-mini)\n- [deepseek/deepseek-r1](/deepseek/deepseek-r1)\n- [meta-llama/llama-3.3-70b-instruct](/meta-llama/llama-3.3-70b-instruct)\n- [meta-llama/llama-3.1-405b-instruct](/meta-llama/llama-3.1-405b-instruct)\n- [meta-llama/llama-3.1-70b-instruct](/meta-llama/llama-3.1-70b-instruct)\n- [meta-llama/llama-3.1-8b-instruct](/meta-llama/llama-3.1-8b-instruct)\n- [meta-llama/llama-3-70b-instruct](/meta-llama/llama-3-70b-instruct)\n- [meta-llama/llama-3-8b-instruct](/meta-llama/llama-3-8b-instruct)\n- [qwen/qwen3-235b-a22b](/qwen/qwen3-235b-a22b)\n- [qwen/qwen3-32b](/qwen/qwen3-32b)\n- [qwen/qwen3-14b](/qwen/qwen3-14b)\n- [cohere/command-r-plus-08-2024](/cohere/command-r-plus-08-2024)\n- [cohere/command-r-08-2024](/cohere/command-r-08-2024)\n- [moonshotai/kimi-k2-thinking](/moonshotai/kimi-k2-thinking)\n- [perplexity/sonar](/perplexity/sonar)", "context_length": 2000000, "architecture": { - "modality": "text->text", + "modality": "text+image+file+audio+video->text+image", "input_modalities": [ - "text" + "text", + "image", + "audio", + "file", + "video" ], "output_modalities": [ - "text" + "text", + "image" ], "tokenizer": "Router", "instruct_type": null @@ -17077,7 +16322,29 @@ "is_moderated": false }, "per_request_limits": null, - "supported_parameters": [], + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "reasoning_effort", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p", + "web_search_options" + ], "default_parameters": { "temperature": null, "top_p": null, @@ -17200,11 
+16467,7 @@ }, "pricing": { "prompt": "0.00000011", - "completion": "0.00000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.00000019" }, "top_provider": { "context_length": 2824, @@ -17296,11 +16559,7 @@ }, "pricing": { "prompt": "0.00000075", - "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" + "completion": "0.000001" }, "top_provider": { "context_length": 8000, @@ -17353,7 +16612,7 @@ }, "top_provider": { "context_length": 6144, - "max_completion_tokens": null, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null, @@ -17403,7 +16662,7 @@ }, "top_provider": { "context_length": 4096, - "max_completion_tokens": null, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null, diff --git a/packages/kbot/dist-in/src/models/cache/openrouter.ts b/packages/kbot/dist-in/src/models/cache/openrouter.ts index 4f614aa2..c8ed7eb6 100644 --- a/packages/kbot/dist-in/src/models/cache/openrouter.ts +++ b/packages/kbot/dist-in/src/models/cache/openrouter.ts @@ -1 +1 @@ -export const models = [{"id":"minimax/minimax-m2-her","name":"MiniMax: MiniMax M2-her","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.000000375"},"created":1769177239,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"writer/palmyra-x5","name":"Writer: Palmyra X5","pricing":{"prompt":"0.0000006","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1769003823,"top_provider":{"context_length":1040000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"liquid/lfm-2.5-1.2b-thinking:free","name":"LiquidAI: LFM2.5-1.2B-Thinking (free)","pricing":{"prompt":"0","completion":"0"},"created":1768927527,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-2.5-1.2b-instruct:free","name":"LiquidAI: LFM2.5-1.2B-Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1768927521,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-audio","name":"OpenAI: GPT Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","audio":"0.000032"},"created":1768862569,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-audio-mini","name":"OpenAI: GPT Audio Mini","pricing":{"prompt":"0.0000006","completion":"0.0000024","audio":"0.0000006"},"created":1768859419,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"z-ai/glm-4.7-flash","name":"Z.AI: GLM 4.7 Flash","pricing":{"prompt":"0.00000007","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001"},"created":1768833913,"top_provider":{"context_length":200000,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-5.2-codex","name":"OpenAI: GPT-5.2-Codex","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1768409315,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"allenai/molmo-2-8b:free","name":"AllenAI: Molmo2 8B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1767996672,"top_provider":{"context_length":36864,"max_completion_tokens":36864,"is_moderated":false}},{"id":"allenai/olmo-3.1-32b-instruct","name":"AllenAI: Olmo 3.1 32B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1767728554,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance-seed/seed-1.6-flash","name":"ByteDance Seed: Seed 1.6 Flash","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1766505011,"top_provider":{"context_length":262144,"max_completion_tokens":32768,"is_moderated":false}},{"id":"bytedance-seed/seed-1.6","name":"ByteDance Seed: Seed 1.6","pricing":{"prompt":"0.00000025","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1766504997,"top_provider":{"context_length":262144,"max_completion_tokens":32768,"is_moderated":false}},{"id":"minimax/minimax-m2.1","name":"MiniMax: MiniMax M2.1","pricing":{"prompt":"0.00000027","completion":"0.0000011"},"created":1766454997,"top_provider":{"context_length":196608,"max_completion_tokens":196608,"is_moderated":false}},{"id":"z-ai/glm-4.7","name":"Z.AI: GLM 4.7","pricing":{"prompt":"0.0000004","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1766378014,"top_provider":{"context_length":202752,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-3-flash-preview","name":"Google: Gemini 3 Flash Preview","pricing":{"prompt":"0.0000005","completion":"0.000003","image":"0.0000005","audio":"0.000001","internal_reasoning":"0.000003","input_cache_read":"0.00000005","input_cache_write":"0.00000008333333333333334"},"created":1765987078,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"mistralai/mistral-small-creative","name":"Mistral: Mistral Small Creative","pricing":{"prompt":"0.0000001","completion":"0.0000003"},"created":1765908653,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"allenai/olmo-3.1-32b-think","name":"AllenAI: Olmo 3.1 32B Think","pricing":{"prompt":"0.00000015","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765907719,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"xiaomi/mimo-v2-flash:free","name":"Xiaomi: MiMo-V2-Flash (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765731308,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"xiaomi/mimo-v2-flash","name":"Xiaomi: MiMo-V2-Flash","pricing":{"prompt":"0.00000009","completion":"0.00000029","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765731308,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-3-nano-30b-a3b:free","name":"NVIDIA: Nemotron 3 Nano 30B A3B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765731275,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-3-nano-30b-a3b","name":"NVIDIA: Nemotron 3 Nano 30B A3B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765731275,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"openai/gpt-5.2-chat","name":"OpenAI: GPT-5.2 Chat","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1765389783,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5.2-pro","name":"OpenAI: GPT-5.2 Pro","pricing":{"prompt":"0.000021","completion":"0.000168","web_search":"0.01"},"created":1765389780,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.2","name":"OpenAI: GPT-5.2","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1765389775,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"mistralai/devstral-2512:free","name":"Mistral: Devstral 2 2512 (free)","pricing":{"prompt":"0","completion":"0"},"created":1765285419,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-2512","name":"Mistral: Devstral 2 2512","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765285419,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"relace/relace-search","name":"Relace: Relace Search","pricing":{"prompt":"0.000001","completion":"0.000003"},"created":1765213560,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"z-ai/glm-4.6v","name":"Z.AI: GLM 4.6V","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0"},"created":1765207462,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nex-agi/deepseek-v3.1-nex-n1","name":"Nex AGI: DeepSeek V3.1 Nex N1","pricing":{"prompt":"0.00000027","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0","input_cache_write":"0"},"created":1765204393,"top_provider":{"context_length":131072,"max_completion_tokens":163840,"is_moderated":false}},{"id":"essentialai/rnj-1-instruct","name":"EssentialAI: Rnj 1 Instruct","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1765094847,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/bodybuilder","name":"Body Builder (beta)","pricing":{"prompt":"-1","completion":"-1"},"created":1764903653,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5.1-codex-max","name":"OpenAI: 
GPT-5.1-Codex-Max","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1764878934,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"amazon/nova-2-lite-v1","name":"Amazon: Nova 2 Lite","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764696672,"top_provider":{"context_length":1000000,"max_completion_tokens":65535,"is_moderated":true}},{"id":"mistralai/ministral-14b-2512","name":"Mistral: Ministral 3 14B 2512","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1764681735,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b-2512","name":"Mistral: Ministral 3 8B 2512","pricing":{"prompt":"0.00000015","completion":"0.00000015"},"created":1764681654,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b-2512","name":"Mistral: Ministral 3 3B 2512","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1764681560,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2512","name":"Mistral: Mistral Large 3 2512","pricing":{"prompt":"0.0000005","completion":"0.0000015"},"created":1764624472,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/trinity-mini:free","name":"Arcee AI: Trinity Mini (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764601720,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/trinity-mini","name":"Arcee AI: Trinity Mini","pricing":{"prompt":"0.000000045","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0"},"created":1764601720,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2-speciale","name":"DeepSeek: DeepSeek V3.2 Speciale","pricing":{"prompt":"0.00000027","completion":"0.00000041","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764594837,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2","name":"DeepSeek: DeepSeek V3.2","pricing":{"prompt":"0.00000025","completion":"0.00000038","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764594642,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"prime-intellect/intellect-3","name":"Prime Intellect: INTELLECT-3","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764212534,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"tngtech/tng-r1t-chimera:free","name":"TNG: R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764184161,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"tngtech/tng-r1t-chimera","name":"TNG: R1T 
Chimera","pricing":{"prompt":"0.00000025","completion":"0.00000085","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1764184161,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"anthropic/claude-opus-4.5","name":"Anthropic: Claude Opus 4.5","pricing":{"prompt":"0.000005","completion":"0.000025","web_search":"0.01","input_cache_read":"0.0000005","input_cache_write":"0.00000625"},"created":1764010580,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"allenai/olmo-3-32b-think","name":"AllenAI: Olmo 3 32B Think","pricing":{"prompt":"0.00000015","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1763758276,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"allenai/olmo-3-7b-instruct","name":"AllenAI: Olmo 3 7B Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1763758273,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"allenai/olmo-3-7b-think","name":"AllenAI: Olmo 3 7B Think","pricing":{"prompt":"0.00000012","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1763758270,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-3-pro-image-preview","name":"Google: Nano Banana Pro (Gemini 3 Pro Image Preview)","pricing":{"prompt":"0.000002","completion":"0.000012","image":"0.000002","audio":"0.000002","internal_reasoning":"0.000012","input_cache_read":"0.0000002","input_cache_write":"0.000000375"},"created":1763653797,"top_provider":{"context_length":65536,"max_completion_tokens":32768,"is_moderated":false}},{"id":"x-ai/grok-4.1-fast","name":"xAI: Grok 4.1 Fast","pricing":{"prompt":"0.0000002","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000005"},"created":1763587502,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"google/gemini-3-pro-preview","name":"Google: Gemini 3 Pro Preview","pricing":{"prompt":"0.000002","completion":"0.000012","image":"0.000002","audio":"0.000002","internal_reasoning":"0.000012","input_cache_read":"0.0000002","input_cache_write":"0.000000375"},"created":1763474668,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepcogito/cogito-v2.1-671b","name":"Deep Cogito: Cogito v2.1 671B","pricing":{"prompt":"0.00000125","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1763071233,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5.1","name":"OpenAI: GPT-5.1","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1763060305,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.1-chat","name":"OpenAI: GPT-5.1 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1763060302,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5.1-codex","name":"OpenAI: 
GPT-5.1-Codex","pricing":{"prompt":"0.00000125","completion":"0.00001","input_cache_read":"0.000000125"},"created":1763060298,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.1-codex-mini","name":"OpenAI: GPT-5.1-Codex-Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","input_cache_read":"0.000000025"},"created":1763057820,"top_provider":{"context_length":400000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"kwaipilot/kat-coder-pro","name":"Kwaipilot: KAT-Coder-Pro V1","pricing":{"prompt":"0.000000207","completion":"0.000000828","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000414"},"created":1762745912,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"moonshotai/kimi-k2-thinking","name":"MoonshotAI: Kimi K2 Thinking","pricing":{"prompt":"0.0000004","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1762440622,"top_provider":{"context_length":262144,"max_completion_tokens":65535,"is_moderated":false}},{"id":"amazon/nova-premier-v1","name":"Amazon: Nova Premier 1.0","pricing":{"prompt":"0.0000025","completion":"0.0000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000625"},"created":1761950332,"top_provider":{"context_length":1000000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"perplexity/sonar-pro-search","name":"Perplexity: Sonar Pro Search","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0.018","image":"0","web_search":"0","internal_reasoning":"0"},"created":1761854366,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/voxtral-small-24b-2507","name":"Mistral: Voxtral Small 24B 2507","pricing":{"prompt":"0.0000001","completion":"0.0000003","audio":"0.0001"},"created":1761835144,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-safeguard-20b","name":"OpenAI: gpt-oss-safeguard-20b","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000037"},"created":1761752836,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"nvidia/nemotron-nano-12b-v2-vl:free","name":"NVIDIA: Nemotron Nano 12B 2 VL (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1761675565,"top_provider":{"context_length":128000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"nvidia/nemotron-nano-12b-v2-vl","name":"NVIDIA: Nemotron Nano 12B 2 VL","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1761675565,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m2","name":"MiniMax: MiniMax M2","pricing":{"prompt":"0.0000002","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003"},"created":1761252093,"top_provider":{"context_length":196608,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-vl-32b-instruct","name":"Qwen: Qwen3 VL 32B 
Instruct","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1761231332,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm2-8b-a1b","name":"LiquidAI: LFM2-8B-A1B","pricing":{"prompt":"0.00000001","completion":"0.00000002"},"created":1760970984,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-2.2-6b","name":"LiquidAI: LFM2-2.6B","pricing":{"prompt":"0.00000001","completion":"0.00000002"},"created":1760970889,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"ibm-granite/granite-4.0-h-micro","name":"IBM: Granite 4.0 Micro","pricing":{"prompt":"0.000000017","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760927695,"top_provider":{"context_length":131000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-405b","name":"Deep Cogito: Cogito V2 Preview Llama 405B","pricing":{"prompt":"0.0000035","completion":"0.0000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760709933,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5-image-mini","name":"OpenAI: GPT-5 Image Mini","pricing":{"prompt":"0.0000025","completion":"0.000002","web_search":"0.01","input_cache_read":"0.00000025"},"created":1760624583,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"anthropic/claude-haiku-4.5","name":"Anthropic: Claude Haiku 4.5","pricing":{"prompt":"0.000001","completion":"0.000005","web_search":"0.01","input_cache_read":"0.0000001","input_cache_write":"0.00000125"},"created":1760547638,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"qwen/qwen3-vl-8b-thinking","name":"Qwen: Qwen3 VL 8B Thinking","pricing":{"prompt":"0.00000018","completion":"0.0000021","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760463746,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-8b-instruct","name":"Qwen: Qwen3 VL 8B Instruct","pricing":{"prompt":"0.00000008","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0"},"created":1760463308,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"openai/gpt-5-image","name":"OpenAI: GPT-5 Image","pricing":{"prompt":"0.00001","completion":"0.00001","web_search":"0.01","input_cache_read":"0.00000125"},"created":1760447986,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/o3-deep-research","name":"OpenAI: o3 Deep Research","pricing":{"prompt":"0.00001","completion":"0.00004","web_search":"0.01","input_cache_read":"0.0000025"},"created":1760129661,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini-deep-research","name":"OpenAI: o4 Mini Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1760129642,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1.5","name":"NVIDIA: Llama 3.3 
Nemotron Super 49B V1.5","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760101395,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b-thinking","name":"Baidu: ERNIE 4.5 21B A3B Thinking","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760048887,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image","name":"Google: Gemini 2.5 Flash Image (Nano Banana)","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1759870431,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-thinking","name":"Qwen: Qwen3 VL 30B A3B Thinking","pricing":{"prompt":"0.0000002","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0"},"created":1759794479,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-instruct","name":"Qwen: Qwen3 VL 30B A3B Instruct","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759794476,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5-pro","name":"OpenAI: GPT-5 Pro","pricing":{"prompt":"0.000015","completion":"0.00012","web_search":"0.01"},"created":1759776663,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"z-ai/glm-4.6","name":"Z.AI: GLM 4.6","pricing":{"prompt":"0.00000035","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759235576,"top_provider":{"context_length":202752,"max_completion_tokens":65536,"is_moderated":false}},{"id":"z-ai/glm-4.6:exacto","name":"Z.AI: GLM 4.6 (exacto)","pricing":{"prompt":"0.00000044","completion":"0.00000176","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759235576,"top_provider":{"context_length":204800,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-sonnet-4.5","name":"Anthropic: Claude Sonnet 4.5","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.01","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1759161676,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"deepseek/deepseek-v3.2-exp","name":"DeepSeek: DeepSeek V3.2 Exp","pricing":{"prompt":"0.00000021","completion":"0.00000032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1759150481,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"thedrummer/cydonia-24b-v4.1","name":"TheDrummer: Cydonia 24B V4.1","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758931878,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"relace/relace-apply-3","name":"Relace: Relace Apply 
3","pricing":{"prompt":"0.00000085","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758891572,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-09-2025","name":"Google: Gemini 2.5 Flash Preview 09-2025","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1758820178,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-09-2025","name":"Google: Gemini 2.5 Flash Lite Preview 09-2025","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000003","internal_reasoning":"0.0000004","input_cache_read":"0.00000001","input_cache_write":"0.00000008333333333333334"},"created":1758819686,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-thinking","name":"Qwen: Qwen3 VL 235B A22B Thinking","pricing":{"prompt":"0.00000045","completion":"0.0000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758668690,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-instruct","name":"Qwen: Qwen3 VL 235B A22B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1758668687,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-max","name":"Qwen: Qwen3 Max","pricing":{"prompt":"0.0000012","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000024"},"created":1758662808,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-coder-plus","name":"Qwen: Qwen3 Coder Plus","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1758662707,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/gpt-5-codex","name":"OpenAI: GPT-5 Codex","pricing":{"prompt":"0.00000125","completion":"0.00001","input_cache_read":"0.000000125"},"created":1758643403,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"deepseek/deepseek-v3.1-terminus:exacto","name":"DeepSeek: DeepSeek V3.1 Terminus (exacto)","pricing":{"prompt":"0.00000021","completion":"0.00000079","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000168"},"created":1758548275,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3.1-terminus","name":"DeepSeek: DeepSeek V3.1 Terminus","pricing":{"prompt":"0.00000021","completion":"0.00000079","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000168"},"created":1758548275,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4-fast","name":"xAI: Grok 4 
Fast","pricing":{"prompt":"0.0000002","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000005"},"created":1758240090,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"alibaba/tongyi-deepresearch-30b-a3b","name":"Tongyi DeepResearch 30B A3B","pricing":{"prompt":"0.00000009","completion":"0.0000004"},"created":1758210804,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-coder-flash","name":"Qwen: Qwen3 Coder Flash","pricing":{"prompt":"0.0000003","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008"},"created":1758115536,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"opengvlab/internvl3-78b","name":"OpenGVLab: InternVL3 78B","pricing":{"prompt":"0.0000001","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757962555,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-thinking","name":"Qwen: Qwen3 Next 80B A3B Thinking","pricing":{"prompt":"0.00000015","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612284,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct:free","name":"Qwen: Qwen3 Next 80B A3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct","name":"Qwen: Qwen3 Next 80B A3B Instruct","pricing":{"prompt":"0.00000009","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"meituan/longcat-flash-chat","name":"Meituan: LongCat Flash Chat","pricing":{"prompt":"0.0000002","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28","name":"Qwen: Qwen Plus 0728","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28:thinking","name":"Qwen: Qwen Plus 0728 (thinking)","pricing":{"prompt":"0.0000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2:free","name":"NVIDIA: Nemotron Nano 9B V2 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2","name":"NVIDIA: Nemotron Nano 9B 
V2","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905","name":"MoonshotAI: Kimi K2 0905","pricing":{"prompt":"0.00000039","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905:exacto","name":"MoonshotAI: Kimi K2 0905 (exacto)","pricing":{"prompt":"0.0000006","completion":"0.0000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-70b","name":"Deep Cogito: Cogito V2 Preview Llama 70B","pricing":{"prompt":"0.00000088","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831784,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun-ai/step3","name":"StepFun: Step3","pricing":{"prompt":"0.00000057","completion":"0.00000142","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756415375,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.000000051","completion":"0.00000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756399192,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 70B","pricing":{"prompt":"0.00000011","completion":"0.00000038","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 405B","pricing":{"prompt":"0.000001","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.00000015","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":32768,"max_completion_tokens":7168,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o 
Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","audio":"0.00004"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000011","input_cache_write":"0"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 1.7","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","web_search":"0.01","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","web_search":"0.01","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b:free","name":"OpenAI: gpt-oss-120b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: 
gpt-oss-120b","pricing":{"prompt":"0.000000039","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-120b:exacto","name":"OpenAI: gpt-oss-120b (exacto)","pricing":{"prompt":"0.000000039","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":true}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000002","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 4.1","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 2508","pricing":{"prompt":"0.0000003","completion":"0.0000009"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753972379,"top_provider":{"context_length":160000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.00000008","completion":"0.00000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.00000035","completion":"0.00000155","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":96000,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 Air","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 
2507","pricing":{"prompt":"0.00000011","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262000,"max_completion_tokens":262000,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.00000022","completion":"0.00000095","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-coder:exacto","name":"Qwen: Qwen3 Coder 480B A35B (exacto)","pricing":{"prompt":"0.00000022","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000003","internal_reasoning":"0.0000004","input_cache_read":"0.00000001","input_cache_write":"0.00000008333333333333334"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 2507","pricing":{"prompt":"0.000000071","completion":"0.000000463","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2:free","name":"MoonshotAI: Kimi K2 0711 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":true}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2 0711","pricing":{"prompt":"0.0000005","completion":"0.0000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral 
Medium","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.0000001","completion":"0.0000003"},"created":1752160751,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B Instruct","pricing":{"prompt":"0.00000014","completion":"0.00000057","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera","name":"TNG: DeepSeek R1T2 Chimera","pricing":{"prompt":"0.00000025","completion":"0.00000085","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910858,"top_provider":{"context_length":262144,"max_completion_tokens":131072,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 Fast","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B ","pricing":{"prompt":"0.00000042","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: 
Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000006","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000004","completion":"0.0000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","web_search":"0.01"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 
0528","pricing":{"prompt":"0.0000004","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.00000002","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder 
Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000006","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.00000005","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0"},"created":1745876632,"top_provider":{"context_length":32000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.00000008","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T Chimera","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","web_search":"0.01","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: 
o3","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","web_search":"0.01","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"qwen/qwen2.5-coder-7b-instruct","name":"Qwen: Qwen2.5 Coder 7B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744734887,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","web_search":"0.01","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","web_search":"0.01","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 
Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0.0003342","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":327680,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.00000005","completion":"0.00000022","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000019","completion":"0.00000087","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000002"},"created":1741988556,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000001703012","completion":"0.0000000681536","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000003","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search 
Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","web_search":"0.0275"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","web_search":"0.035"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000004","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":96000,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.00000055","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.00000015","completion":"0.0000004"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","image":"0.000000075","audio":"0.000000075","internal_reasoning":"0.0000003"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 
Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000007","internal_reasoning":"0.0000004","input_cache_read":"0.000000025","input_cache_write":"0.00000008333333333333334"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000008","completion":"0.0000016"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: 
Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000029","completion":"0.00000029"},"created":1738194830,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000007","completion":"0.0000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":64000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000006","completion":"0.00000014"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-70b-hanami-x1","name":"Sao10K: Llama 3.1 70B Hanami x1","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736302854,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.0000003","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 
70B","pricing":{"prompt":"0.00000065","completion":"0.00000075"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B 
Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.000003","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000006","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen: Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.0000012","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 
Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.00000017","completion":"0.00000043"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.000000027","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":60000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000012","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"qwen/qwen-2.5-vl-7b-instruct:free","name":"Qwen: Qwen2.5-VL 7B Instruct 
(free)","pricing":{"prompt":"0","completion":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000003"},"created":1723939200,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b:free","name":"Nous: Hermes 3 405B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000004","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct:free","name":"Meta: Llama 3.1 405B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B 
Instruct","pricing":{"prompt":"0.0000035","completion":"0.0000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":10000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000003","completion":"0.00000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.00000014","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 
8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-tiny","name":"Mistral 
Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000054","completion":"0.00000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.00000375","completion":"0.0000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":1024,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.00000075","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.00000045","completion":"0.00000065"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000006","completion":"0.00000006"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older 
v0314)","pricing":{"prompt":"0.00003","completion":"0.00006"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file +export const models = [{"id":"qwen/qwen3-coder-next","name":"Qwen: Qwen3 Coder Next","pricing":{"prompt":"0.0000002","completion":"0.0000015"},"created":1770164101,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openrouter/free","name":"Free Models Router","pricing":{"prompt":"0","completion":"0"},"created":1769917427,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun/step-3.5-flash:free","name":"StepFun: Step 3.5 Flash (free)","pricing":{"prompt":"0","completion":"0"},"created":1769728337,"top_provider":{"context_length":256000,"max_completion_tokens":256000,"is_moderated":false}},{"id":"arcee-ai/trinity-large-preview:free","name":"Arcee AI: Trinity Large Preview (free)","pricing":{"prompt":"0","completion":"0"},"created":1769552670,"top_provider":{"context_length":131000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2.5","name":"MoonshotAI: Kimi K2.5","pricing":{"prompt":"0.00000045","completion":"0.0000025"},"created":1769487076,"top_provider":{"context_length":262144,"max_completion_tokens":65535,"is_moderated":false}},{"id":"upstage/solar-pro-3:free","name":"Upstage: Solar Pro 3 (free)","pricing":{"prompt":"0","completion":"0"},"created":1769481200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m2-her","name":"MiniMax: MiniMax M2-her","pricing":{"prompt":"0.0000003","completion":"0.0000012","input_cache_read":"0.00000003"},"created":1769177239,"top_provider":{"context_length":65536,"max_completion_tokens":2048,"is_moderated":false}},{"id":"writer/palmyra-x5","name":"Writer: Palmyra X5","pricing":{"prompt":"0.0000006","completion":"0.000006"},"created":1769003823,"top_provider":{"context_length":1040000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"liquid/lfm-2.5-1.2b-thinking:free","name":"LiquidAI: LFM2.5-1.2B-Thinking (free)","pricing":{"prompt":"0","completion":"0"},"created":1768927527,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-2.5-1.2b-instruct:free","name":"LiquidAI: LFM2.5-1.2B-Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1768927521,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-audio","name":"OpenAI: GPT Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","audio":"0.000032"},"created":1768862569,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-audio-mini","name":"OpenAI: GPT Audio Mini","pricing":{"prompt":"0.0000006","completion":"0.0000024","audio":"0.0000006"},"created":1768859419,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"z-ai/glm-4.7-flash","name":"Z.AI: GLM 
4.7 Flash","pricing":{"prompt":"0.00000007","completion":"0.0000004","input_cache_read":"0.00000001"},"created":1768833913,"top_provider":{"context_length":200000,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-5.2-codex","name":"OpenAI: GPT-5.2-Codex","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1768409315,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"allenai/molmo-2-8b:free","name":"AllenAI: Molmo2 8B (free)","pricing":{"prompt":"0","completion":"0"},"created":1767996672,"top_provider":{"context_length":36864,"max_completion_tokens":36864,"is_moderated":false}},{"id":"allenai/olmo-3.1-32b-instruct","name":"AllenAI: Olmo 3.1 32B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000006"},"created":1767728554,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance-seed/seed-1.6-flash","name":"ByteDance Seed: Seed 1.6 Flash","pricing":{"prompt":"0.000000075","completion":"0.0000003"},"created":1766505011,"top_provider":{"context_length":262144,"max_completion_tokens":32768,"is_moderated":false}},{"id":"bytedance-seed/seed-1.6","name":"ByteDance Seed: Seed 1.6","pricing":{"prompt":"0.00000025","completion":"0.000002"},"created":1766504997,"top_provider":{"context_length":262144,"max_completion_tokens":32768,"is_moderated":false}},{"id":"minimax/minimax-m2.1","name":"MiniMax: MiniMax M2.1","pricing":{"prompt":"0.00000027","completion":"0.0000011"},"created":1766454997,"top_provider":{"context_length":196608,"max_completion_tokens":196608,"is_moderated":false}},{"id":"z-ai/glm-4.7","name":"Z.AI: GLM 4.7","pricing":{"prompt":"0.0000004","completion":"0.0000015"},"created":1766378014,"top_provider":{"context_length":202752,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-3-flash-preview","name":"Google: Gemini 3 Flash Preview","pricing":{"prompt":"0.0000005","completion":"0.000003","image":"0.0000005","audio":"0.000001","internal_reasoning":"0.000003","input_cache_read":"0.00000005","input_cache_write":"0.00000008333333333333334"},"created":1765987078,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"mistralai/mistral-small-creative","name":"Mistral: Mistral Small Creative","pricing":{"prompt":"0.0000001","completion":"0.0000003"},"created":1765908653,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"allenai/olmo-3.1-32b-think","name":"AllenAI: Olmo 3.1 32B Think","pricing":{"prompt":"0.00000015","completion":"0.0000005"},"created":1765907719,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"xiaomi/mimo-v2-flash","name":"Xiaomi: MiMo-V2-Flash","pricing":{"prompt":"0.00000009","completion":"0.00000029"},"created":1765731308,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-3-nano-30b-a3b:free","name":"NVIDIA: Nemotron 3 Nano 30B A3B (free)","pricing":{"prompt":"0","completion":"0"},"created":1765731275,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-3-nano-30b-a3b","name":"NVIDIA: Nemotron 3 Nano 30B 
A3B","pricing":{"prompt":"0.00000005","completion":"0.0000002"},"created":1765731275,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5.2-chat","name":"OpenAI: GPT-5.2 Chat","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1765389783,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5.2-pro","name":"OpenAI: GPT-5.2 Pro","pricing":{"prompt":"0.000021","completion":"0.000168","web_search":"0.01"},"created":1765389780,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.2","name":"OpenAI: GPT-5.2","pricing":{"prompt":"0.00000175","completion":"0.000014","web_search":"0.01","input_cache_read":"0.000000175"},"created":1765389775,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"mistralai/devstral-2512","name":"Mistral: Devstral 2 2512","pricing":{"prompt":"0.00000005","completion":"0.00000022"},"created":1765285419,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"relace/relace-search","name":"Relace: Relace Search","pricing":{"prompt":"0.000001","completion":"0.000003"},"created":1765213560,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"z-ai/glm-4.6v","name":"Z.AI: GLM 4.6V","pricing":{"prompt":"0.0000003","completion":"0.0000009"},"created":1765207462,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nex-agi/deepseek-v3.1-nex-n1","name":"Nex AGI: DeepSeek V3.1 Nex N1","pricing":{"prompt":"0.00000027","completion":"0.000001"},"created":1765204393,"top_provider":{"context_length":131072,"max_completion_tokens":163840,"is_moderated":false}},{"id":"essentialai/rnj-1-instruct","name":"EssentialAI: Rnj 1 Instruct","pricing":{"prompt":"0.00000015","completion":"0.00000015"},"created":1765094847,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/bodybuilder","name":"Body Builder (beta)","pricing":{"prompt":"-1","completion":"-1"},"created":1764903653,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5.1-codex-max","name":"OpenAI: GPT-5.1-Codex-Max","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1764878934,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"amazon/nova-2-lite-v1","name":"Amazon: Nova 2 Lite","pricing":{"prompt":"0.0000003","completion":"0.0000025"},"created":1764696672,"top_provider":{"context_length":1000000,"max_completion_tokens":65535,"is_moderated":true}},{"id":"mistralai/ministral-14b-2512","name":"Mistral: Ministral 3 14B 2512","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1764681735,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b-2512","name":"Mistral: Ministral 3 8B 2512","pricing":{"prompt":"0.00000015","completion":"0.00000015"},"created":1764681654,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b-2512","name":"Mistral: Ministral 3 3B 
2512","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1764681560,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2512","name":"Mistral: Mistral Large 3 2512","pricing":{"prompt":"0.0000005","completion":"0.0000015"},"created":1764624472,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/trinity-mini:free","name":"Arcee AI: Trinity Mini (free)","pricing":{"prompt":"0","completion":"0"},"created":1764601720,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/trinity-mini","name":"Arcee AI: Trinity Mini","pricing":{"prompt":"0.000000045","completion":"0.00000015"},"created":1764601720,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2-speciale","name":"DeepSeek: DeepSeek V3.2 Speciale","pricing":{"prompt":"0.00000027","completion":"0.00000041"},"created":1764594837,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2","name":"DeepSeek: DeepSeek V3.2","pricing":{"prompt":"0.00000025","completion":"0.00000038"},"created":1764594642,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"prime-intellect/intellect-3","name":"Prime Intellect: INTELLECT-3","pricing":{"prompt":"0.0000002","completion":"0.0000011"},"created":1764212534,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"tngtech/tng-r1t-chimera:free","name":"TNG: R1T Chimera (free)","pricing":{"prompt":"0","completion":"0"},"created":1764184161,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"tngtech/tng-r1t-chimera","name":"TNG: R1T Chimera","pricing":{"prompt":"0.00000025","completion":"0.00000085"},"created":1764184161,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"anthropic/claude-opus-4.5","name":"Anthropic: Claude Opus 4.5","pricing":{"prompt":"0.000005","completion":"0.000025","web_search":"0.01","input_cache_read":"0.0000005","input_cache_write":"0.00000625"},"created":1764010580,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"allenai/olmo-3-32b-think","name":"AllenAI: Olmo 3 32B Think","pricing":{"prompt":"0.00000015","completion":"0.0000005"},"created":1763758276,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"allenai/olmo-3-7b-instruct","name":"AllenAI: Olmo 3 7B Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000002"},"created":1763758273,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"allenai/olmo-3-7b-think","name":"AllenAI: Olmo 3 7B Think","pricing":{"prompt":"0.00000012","completion":"0.0000002"},"created":1763758270,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-3-pro-image-preview","name":"Google: Nano Banana Pro (Gemini 3 Pro Image 
Preview)","pricing":{"prompt":"0.000002","completion":"0.000012","image":"0.000002","audio":"0.000002","internal_reasoning":"0.000012","input_cache_read":"0.0000002","input_cache_write":"0.000000375"},"created":1763653797,"top_provider":{"context_length":65536,"max_completion_tokens":32768,"is_moderated":false}},{"id":"x-ai/grok-4.1-fast","name":"xAI: Grok 4.1 Fast","pricing":{"prompt":"0.0000002","completion":"0.0000005","web_search":"0.005","input_cache_read":"0.00000005"},"created":1763587502,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"google/gemini-3-pro-preview","name":"Google: Gemini 3 Pro Preview","pricing":{"prompt":"0.000002","completion":"0.000012","image":"0.000002","audio":"0.000002","internal_reasoning":"0.000012","input_cache_read":"0.0000002","input_cache_write":"0.000000375"},"created":1763474668,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepcogito/cogito-v2.1-671b","name":"Deep Cogito: Cogito v2.1 671B","pricing":{"prompt":"0.00000125","completion":"0.00000125"},"created":1763071233,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5.1","name":"OpenAI: GPT-5.1","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1763060305,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.1-chat","name":"OpenAI: GPT-5.1 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1763060302,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5.1-codex","name":"OpenAI: GPT-5.1-Codex","pricing":{"prompt":"0.00000125","completion":"0.00001","input_cache_read":"0.000000125"},"created":1763060298,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5.1-codex-mini","name":"OpenAI: GPT-5.1-Codex-Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","input_cache_read":"0.000000025"},"created":1763057820,"top_provider":{"context_length":400000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"kwaipilot/kat-coder-pro","name":"Kwaipilot: KAT-Coder-Pro V1","pricing":{"prompt":"0.000000207","completion":"0.000000828","input_cache_read":"0.0000000414"},"created":1762745912,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"moonshotai/kimi-k2-thinking","name":"MoonshotAI: Kimi K2 Thinking","pricing":{"prompt":"0.0000004","completion":"0.00000175"},"created":1762440622,"top_provider":{"context_length":262144,"max_completion_tokens":65535,"is_moderated":false}},{"id":"amazon/nova-premier-v1","name":"Amazon: Nova Premier 1.0","pricing":{"prompt":"0.0000025","completion":"0.0000125","input_cache_read":"0.000000625"},"created":1761950332,"top_provider":{"context_length":1000000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"perplexity/sonar-pro-search","name":"Perplexity: Sonar Pro Search","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0.018","image":"0","web_search":"0","internal_reasoning":"0"},"created":1761854366,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/voxtral-small-24b-2507","name":"Mistral: Voxtral Small 24B 
2507","pricing":{"prompt":"0.0000001","completion":"0.0000003","audio":"0.0001"},"created":1761835144,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-safeguard-20b","name":"OpenAI: gpt-oss-safeguard-20b","pricing":{"prompt":"0.000000075","completion":"0.0000003","input_cache_read":"0.000000037"},"created":1761752836,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"nvidia/nemotron-nano-12b-v2-vl:free","name":"NVIDIA: Nemotron Nano 12B 2 VL (free)","pricing":{"prompt":"0","completion":"0"},"created":1761675565,"top_provider":{"context_length":128000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"nvidia/nemotron-nano-12b-v2-vl","name":"NVIDIA: Nemotron Nano 12B 2 VL","pricing":{"prompt":"0.0000002","completion":"0.0000006"},"created":1761675565,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m2","name":"MiniMax: MiniMax M2","pricing":{"prompt":"0.000000255","completion":"0.000001","input_cache_read":"0.00000003"},"created":1761252093,"top_provider":{"context_length":196608,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-vl-32b-instruct","name":"Qwen: Qwen3 VL 32B Instruct","pricing":{"prompt":"0.0000005","completion":"0.0000015"},"created":1761231332,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm2-8b-a1b","name":"LiquidAI: LFM2-8B-A1B","pricing":{"prompt":"0.00000001","completion":"0.00000002"},"created":1760970984,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-2.2-6b","name":"LiquidAI: LFM2-2.6B","pricing":{"prompt":"0.00000001","completion":"0.00000002"},"created":1760970889,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"ibm-granite/granite-4.0-h-micro","name":"IBM: Granite 4.0 Micro","pricing":{"prompt":"0.000000017","completion":"0.00000011"},"created":1760927695,"top_provider":{"context_length":131000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-405b","name":"Deep Cogito: Cogito V2 Preview Llama 405B","pricing":{"prompt":"0.0000035","completion":"0.0000035"},"created":1760709933,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5-image-mini","name":"OpenAI: GPT-5 Image Mini","pricing":{"prompt":"0.0000025","completion":"0.000002","web_search":"0.01","input_cache_read":"0.00000025"},"created":1760624583,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"anthropic/claude-haiku-4.5","name":"Anthropic: Claude Haiku 4.5","pricing":{"prompt":"0.000001","completion":"0.000005","web_search":"0.01","input_cache_read":"0.0000001","input_cache_write":"0.00000125"},"created":1760547638,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"qwen/qwen3-vl-8b-thinking","name":"Qwen: Qwen3 VL 8B Thinking","pricing":{"prompt":"0.00000018","completion":"0.0000021","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1760463746,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-8b-instruct","name":"Qwen: Qwen3 VL 8B 
Instruct","pricing":{"prompt":"0.00000008","completion":"0.0000005"},"created":1760463308,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"openai/gpt-5-image","name":"OpenAI: GPT-5 Image","pricing":{"prompt":"0.00001","completion":"0.00001","web_search":"0.01","input_cache_read":"0.00000125"},"created":1760447986,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/o3-deep-research","name":"OpenAI: o3 Deep Research","pricing":{"prompt":"0.00001","completion":"0.00004","web_search":"0.01","input_cache_read":"0.0000025"},"created":1760129661,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini-deep-research","name":"OpenAI: o4 Mini Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1760129642,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1.5","name":"NVIDIA: Llama 3.3 Nemotron Super 49B V1.5","pricing":{"prompt":"0.0000001","completion":"0.0000004"},"created":1760101395,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b-thinking","name":"Baidu: ERNIE 4.5 21B A3B Thinking","pricing":{"prompt":"0.00000007","completion":"0.00000028"},"created":1760048887,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image","name":"Google: Gemini 2.5 Flash Image (Nano Banana)","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1759870431,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-thinking","name":"Qwen: Qwen3 VL 30B A3B Thinking","pricing":{"prompt":"0.0000002","completion":"0.000001"},"created":1759794479,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-vl-30b-a3b-instruct","name":"Qwen: Qwen3 VL 30B A3B Instruct","pricing":{"prompt":"0.00000015","completion":"0.0000006","input_cache_read":"0.000000075"},"created":1759794476,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-5-pro","name":"OpenAI: GPT-5 Pro","pricing":{"prompt":"0.000015","completion":"0.00012","web_search":"0.01"},"created":1759776663,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"z-ai/glm-4.6","name":"Z.AI: GLM 4.6","pricing":{"prompt":"0.00000035","completion":"0.0000015"},"created":1759235576,"top_provider":{"context_length":202752,"max_completion_tokens":65536,"is_moderated":false}},{"id":"z-ai/glm-4.6:exacto","name":"Z.AI: GLM 4.6 (exacto)","pricing":{"prompt":"0.00000044","completion":"0.00000176","input_cache_read":"0.00000011"},"created":1759235576,"top_provider":{"context_length":204800,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-sonnet-4.5","name":"Anthropic: Claude Sonnet 
4.5","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.01","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1759161676,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"deepseek/deepseek-v3.2-exp","name":"DeepSeek: DeepSeek V3.2 Exp","pricing":{"prompt":"0.00000027","completion":"0.00000041"},"created":1759150481,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"thedrummer/cydonia-24b-v4.1","name":"TheDrummer: Cydonia 24B V4.1","pricing":{"prompt":"0.0000003","completion":"0.0000005"},"created":1758931878,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"relace/relace-apply-3","name":"Relace: Relace Apply 3","pricing":{"prompt":"0.00000085","completion":"0.00000125"},"created":1758891572,"top_provider":{"context_length":256000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-09-2025","name":"Google: Gemini 2.5 Flash Preview 09-2025","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1758820178,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-09-2025","name":"Google: Gemini 2.5 Flash Lite Preview 09-2025","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000003","internal_reasoning":"0.0000004","input_cache_read":"0.00000001","input_cache_write":"0.00000008333333333333334"},"created":1758819686,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-thinking","name":"Qwen: Qwen3 VL 235B A22B Thinking","pricing":{"prompt":"0.00000045","completion":"0.0000035"},"created":1758668690,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-vl-235b-a22b-instruct","name":"Qwen: Qwen3 VL 235B A22B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000012"},"created":1758668687,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-max","name":"Qwen: Qwen3 Max","pricing":{"prompt":"0.0000012","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000024"},"created":1758662808,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-coder-plus","name":"Qwen: Qwen3 Coder Plus","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1758662707,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/gpt-5-codex","name":"OpenAI: GPT-5 Codex","pricing":{"prompt":"0.00000125","completion":"0.00001","input_cache_read":"0.000000125"},"created":1758643403,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"deepseek/deepseek-v3.1-terminus:exacto","name":"DeepSeek: DeepSeek V3.1 Terminus 
(exacto)","pricing":{"prompt":"0.00000021","completion":"0.00000079","input_cache_read":"0.000000168"},"created":1758548275,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3.1-terminus","name":"DeepSeek: DeepSeek V3.1 Terminus","pricing":{"prompt":"0.00000021","completion":"0.00000079","input_cache_read":"0.000000168"},"created":1758548275,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4-fast","name":"xAI: Grok 4 Fast","pricing":{"prompt":"0.0000002","completion":"0.0000005","web_search":"0.005","input_cache_read":"0.00000005"},"created":1758240090,"top_provider":{"context_length":2000000,"max_completion_tokens":30000,"is_moderated":false}},{"id":"alibaba/tongyi-deepresearch-30b-a3b","name":"Tongyi DeepResearch 30B A3B","pricing":{"prompt":"0.00000009","completion":"0.0000004"},"created":1758210804,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-coder-flash","name":"Qwen: Qwen3 Coder Flash","pricing":{"prompt":"0.0000003","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008"},"created":1758115536,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":false}},{"id":"opengvlab/internvl3-78b","name":"OpenGVLab: InternVL3 78B","pricing":{"prompt":"0.0000001","completion":"0.00000039"},"created":1757962555,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-thinking","name":"Qwen: Qwen3 Next 80B A3B Thinking","pricing":{"prompt":"0.00000015","completion":"0.0000012"},"created":1757612284,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct:free","name":"Qwen: Qwen3 Next 80B A3B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct","name":"Qwen: Qwen3 Next 80B A3B Instruct","pricing":{"prompt":"0.00000009","completion":"0.0000011"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"meituan/longcat-flash-chat","name":"Meituan: LongCat Flash Chat","pricing":{"prompt":"0.0000002","completion":"0.0000008","input_cache_read":"0.0000002"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28","name":"Qwen: Qwen Plus 0728","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28:thinking","name":"Qwen: Qwen Plus 0728 (thinking)","pricing":{"prompt":"0.0000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2:free","name":"NVIDIA: Nemotron Nano 9B V2 
(free)","pricing":{"prompt":"0","completion":"0"},"created":1757106807,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2","name":"NVIDIA: Nemotron Nano 9B V2","pricing":{"prompt":"0.00000004","completion":"0.00000016"},"created":1757106807,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905","name":"MoonshotAI: Kimi K2 0905","pricing":{"prompt":"0.00000039","completion":"0.0000019"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905:exacto","name":"MoonshotAI: Kimi K2 0905 (exacto)","pricing":{"prompt":"0.0000006","completion":"0.0000025"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-70b","name":"Deep Cogito: Cogito V2 Preview Llama 70B","pricing":{"prompt":"0.00000088","completion":"0.00000088"},"created":1756831784,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun-ai/step3","name":"StepFun: Step3","pricing":{"prompt":"0.00000057","completion":"0.00000142"},"created":1756415375,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.000000051","completion":"0.00000034"},"created":1756399192,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","web_search":"0.005","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 70B","pricing":{"prompt":"0.00000011","completion":"0.00000038"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 405B","pricing":{"prompt":"0.000001","completion":"0.000003"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.00000015","completion":"0.00000075"},"created":1755779628,"top_provider":{"context_length":32768,"max_completion_tokens":7168,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","audio":"0.00004"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B 
A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000006","completion":"0.0000018","input_cache_read":"0.00000011"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 1.7","pricing":{"prompt":"0.000002","completion":"0.000008"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.00000125","completion":"0.00001","web_search":"0.01","input_cache_read":"0.000000125"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","web_search":"0.01","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","web_search":"0.01","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b:free","name":"OpenAI: gpt-oss-120b (free)","pricing":{"prompt":"0","completion":"0"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: gpt-oss-120b","pricing":{"prompt":"0.000000039","completion":"0.00000019"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-120b:exacto","name":"OpenAI: gpt-oss-120b (exacto)","pricing":{"prompt":"0.000000039","completion":"0.00000019"},"created":1754414231,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b (free)","pricing":{"prompt":"0","completion":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":true}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000002","completion":"0.0000001"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 
4.1","pricing":{"prompt":"0.000015","completion":"0.000075","web_search":"0.01","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 2508","pricing":{"prompt":"0.0000003","completion":"0.0000009"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B Instruct","pricing":{"prompt":"0.00000007","completion":"0.00000027"},"created":1753972379,"top_provider":{"context_length":160000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.00000008","completion":"0.00000033"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.00000035","completion":"0.00000155"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":65536,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":96000,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 Air","pricing":{"prompt":"0.00000005","completion":"0.00000022"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 2507","pricing":{"prompt":"0.00000011","completion":"0.0000006"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B (free)","pricing":{"prompt":"0","completion":"0"},"created":1753230546,"top_provider":{"context_length":262000,"max_completion_tokens":262000,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.00000022","completion":"0.00000095"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"qwen/qwen3-coder:exacto","name":"Qwen: Qwen3 Coder 480B A35B (exacto)","pricing":{"prompt":"0.00000022","completion":"0.0000018","input_cache_read":"0.000000022"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":65536,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash 
Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000003","internal_reasoning":"0.0000004","input_cache_read":"0.00000001","input_cache_write":"0.00000008333333333333334"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 2507","pricing":{"prompt":"0.000000071","completion":"0.000000463"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2 0711","pricing":{"prompt":"0.0000005","completion":"0.0000024"},"created":1752263252,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral Medium","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.0000001","completion":"0.0000003"},"created":1752160751,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored (free)","pricing":{"prompt":"0","completion":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.005","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B Instruct","pricing":{"prompt":"0.00000014","completion":"0.00000057"},"created":1751987664,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera","name":"TNG: DeepSeek R1T2 Chimera","pricing":{"prompt":"0.00000025","completion":"0.00000085"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019"},"created":1751910858,"top_provider":{"context_length":262144,"max_completion_tokens":131072,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 Fast","pricing":{"prompt":"0.0000008","completion":"0.0000012"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B 
","pricing":{"prompt":"0.00000042","completion":"0.00000125"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000006","completion":"0.00000018"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000004","completion":"0.0000022"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","image":"0.0000003","audio":"0.000001","internal_reasoning":"0.0000025","input_cache_read":"0.00000003","input_cache_write":"0.00000008333333333333334"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","web_search":"0.01"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","web_search":"0.005","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.005","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 
(free)","pricing":{"prompt":"0","completion":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000004","completion":"0.00000175"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","web_search":"0.01","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.01","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.00000002","completion":"0.0000001"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","image":"0.00000125","audio":"0.00000125","internal_reasoning":"0.00001","input_cache_read":"0.000000125","input_cache_write":"0.000000375"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury 
Coder","pricing":{"prompt":"0.00000025","completion":"0.000001"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000006","completion":"0.00000022"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.00000005","completion":"0.0000004","input_cache_read":"0.00000005"},"created":1745876632,"top_provider":{"context_length":32000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000005","completion":"0.00000022"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.00000008","completion":"0.00000024"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.0000002","completion":"0.0000006"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T Chimera","pricing":{"prompt":"0.0000003","completion":"0.0000012"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","web_search":"0.01","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","web_search":"0.01","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"qwen/qwen2.5-coder-7b-instruct","name":"Qwen: Qwen2.5 Coder 7B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000009"},"created":1744734887,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: 
GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","web_search":"0.01","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","web_search":"0.01","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","web_search":"0.01","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","web_search":"0.005","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.005","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003"},"created":1743881519,"top_provider":{"context_length":327680,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.00000005","completion":"0.00000022"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000019","completion":"0.00000087"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B 
(free)","pricing":{"prompt":"0","completion":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000003","completion":"0.00000011"},"created":1742238937,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000002"},"created":1741988556,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000001703012","completion":"0.0000000681536"},"created":1741905510,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000003","completion":"0.0000001"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","web_search":"0.0275"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","web_search":"0.035"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000004","completion":"0.00000015"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":96000,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.00000055","completion":"0.0000008"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar 
Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.00000015","completion":"0.0000004"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","image":"0.000000075","audio":"0.000000075","internal_reasoning":"0.0000003"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.01","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","web_search":"0.01","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","image":"0.0000001","audio":"0.0000007","internal_reasoning":"0.0000004","input_cache_read":"0.000000025","input_cache_write":"0.00000008333333333333334"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: 
Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000008","completion":"0.0000016"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000015","completion":"0.0000006"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000003","completion":"0.00000011"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000029","completion":"0.00000029"},"created":1738194830,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.00000003","completion":"0.00000011"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000007","completion":"0.0000025"},"created":1737381095,"top_provider":{"context_length":64000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: 
MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000006","completion":"0.00000014"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"sao10k/l3.1-70b-hanami-x1","name":"Sao10K: Llama 3.1 70B Hanami x1","pricing":{"prompt":"0.000003","completion":"0.000003"},"created":1736302854,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.0000003","completion":"0.0000012"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1733506137,"top_provider":{"context_length":128000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000032"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 
2407","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000011"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","web_search":"0.01","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.000003","completion":"0.000005"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000006","completion":"0.00003"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen: Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001"},"created":1729036800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.0000012","completion":"0.0000012"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 
12B","pricing":{"prompt":"0.00000017","completion":"0.00000043"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000002"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.000000027","completion":"0.0000002"},"created":1727222400,"top_provider":{"context_length":60000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000012","completion":"0.00000039"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000003"},"created":1723939200,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b:free","name":"Nous: Hermes 3 405B Instruct (free)","pricing":{"prompt":"0","completion":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B 
Instruct","pricing":{"prompt":"0.000001","completion":"0.000001"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000004","completion":"0.00000005"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000004","completion":"0.000004"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.00000002","completion":"0.00000005"},"created":1721692800,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000035","completion":"0.0000035"},"created":1721692800,"top_provider":{"context_length":10000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000004","completion":"0.0000004"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000002","completion":"0.00000004"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000003","completion":"0.00000009"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 
8B","pricing":{"prompt":"0.00000014","completion":"0.00000014"},"created":1716768000,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.00000051","completion":"0.00000074"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":8000,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000004"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older 
v0613)","pricing":{"prompt":"0.000001","completion":"0.000002"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000054","completion":"0.00000054"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.00000375","completion":"0.0000075"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":1024,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.00000075","completion":"0.000001"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.00000045","completion":"0.00000065"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":4096,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000006","completion":"0.00000006"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older 
v0314)","pricing":{"prompt":"0.00003","completion":"0.00006"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file diff --git a/packages/kbot/package-lock.json b/packages/kbot/package-lock.json index ff2426e9..57d8dc35 100644 --- a/packages/kbot/package-lock.json +++ b/packages/kbot/package-lock.json @@ -1,12 +1,12 @@ { "name": "@polymech/kbot-d", - "version": "0.3.5", + "version": "0.3.6", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@polymech/kbot-d", - "version": "0.3.5", + "version": "0.3.6", "license": "MIT", "dependencies": { "@dmitryrechkin/json-schema-to-zod": "1.0.1", diff --git a/packages/kbot/package.json b/packages/kbot/package.json index b8e027b6..d6e33b98 100644 --- a/packages/kbot/package.json +++ b/packages/kbot/package.json @@ -1,6 +1,6 @@ { "name": "@polymech/kbot-d", - "version": "0.3.5", + "version": "0.3.6", "type": "module", "publishConfig": { "access": "public" @@ -144,4 +144,4 @@ "webpack-visualizer-plugin2": "1.1.0", "zod-to-json-schema": "3.24.1" } -} \ No newline at end of file +} diff --git a/packages/kbot/schema.json b/packages/kbot/schema.json index e508182c..977d144d 100644 --- a/packages/kbot/schema.json +++ b/packages/kbot/schema.json @@ -119,7 +119,7 @@ }, "model": { "type": "string", - "description": "AI model to use for processing. 
Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-instruct | paid\njondurbin/airoboros-l2-70b | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\ncohere/command | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndatabricks/dbrx-instruct | paid\ndeepseek/deepseek-chat-v2.5 | paid\ndeepseek/deepseek-chat | paid\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-exp-1114:free | free\ngoogle/gemini-exp-1121:free | free\ngoogle/gemini-exp-1206:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-flash-1.5-exp | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | paid\ngoogle/gemini-pro-1.5-exp | paid\ngoogle/gemini-pro-vision | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflatebot/mn-mag-mell-r1 | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nliquid/lfm-40b | paid\nlizpreciatior/lzlv-70b-fp16-hf | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-70b-instruct:nitro | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:extended | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3-8b-instruct:nitro | paid\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-405b-instruct:free | free\nmeta-llama/llama-3.1-405b-instruct:nitro | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct:free | free\nmeta-llama/llama-3.1-70b-instruct:nitro | 
paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.2-90b-vision-instruct:free | free\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nsophosympatheia/midnight-rose-70b | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct:nitro | paid\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/mixtral-8x7b-instruct:nitro | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:extended | paid\ngryphe/mythomax-l2-13b:free | free\ngryphe/mythomax-l2-13b:nitro | paid\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | paid\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-huge-128k-online | paid\nperplexity/llama-3.1-sonar-large-128k-chat | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-chat | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/llama-3-sonar-large-32k-chat | 
paid\nperplexity/llama-3-sonar-large-32k-online | paid\nperplexity/llama-3-sonar-small-32k-chat | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qvq-72b-preview | paid\nqwen/qwq-32b-preview | paid\nqwen/qwen-2-vl-72b-instruct | paid\nqwen/qwen-2-vl-7b-instruct | paid\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nundi95/remm-slerp-l2-13b | paid\nundi95/remm-slerp-l2-13b:extended | paid\nthedrummer/rocinante-12b | paid\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nundi95/toppy-m-7b:nitro | paid\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-1106-vision-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4-vision-preview\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n" + "description": "AI model to use for processing. 
Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nai21/jamba-large-1.7 | paid\nai21/jamba-mini-1.7 | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\nalfredpros/codellama-7b-instruct-solidity | paid\nallenai/olmo-2-0325-32b-instruct | paid\nallenai/olmo-3-32b-think:free | free\nallenai/olmo-3-7b-instruct | paid\nallenai/olmo-3-7b-think | paid\nallenai/olmo-3.1-32b-think:free | free\namazon/nova-2-lite-v1 | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-premier-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-haiku-4.5 | paid\nanthropic/claude-opus-4 | paid\nanthropic/claude-opus-4.1 | paid\nanthropic/claude-opus-4.5 | paid\nanthropic/claude-sonnet-4 | paid\nanthropic/claude-sonnet-4.5 | paid\narcee-ai/coder-large | paid\narcee-ai/maestro-reasoning | paid\narcee-ai/spotlight | paid\narcee-ai/trinity-mini | paid\narcee-ai/trinity-mini:free | free\narcee-ai/virtuoso-large | paid\narliai/qwq-32b-arliai-rpr-v1 | paid\nopenrouter/auto | paid\nbaidu/ernie-4.5-21b-a3b | paid\nbaidu/ernie-4.5-21b-a3b-thinking | paid\nbaidu/ernie-4.5-300b-a47b | paid\nbaidu/ernie-4.5-vl-28b-a3b | paid\nbaidu/ernie-4.5-vl-424b-a47b | paid\nopenrouter/bodybuilder | paid\nbytedance-seed/seed-1.6 | paid\nbytedance-seed/seed-1.6-flash | paid\nbytedance/ui-tars-1.5-7b | paid\ndeepcogito/cogito-v2-preview-llama-109b-moe | paid\ncohere/command-a | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepcogito/cogito-v2-preview-llama-405b | paid\ndeepcogito/cogito-v2-preview-llama-70b | paid\ndeepcogito/cogito-v2.1-671b | paid\ndeepseek/deepseek-prover-v2 | paid\ndeepseek/deepseek-r1-0528-qwen3-8b | paid\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat-v3-0324 | paid\ndeepseek/deepseek-chat-v3.1 | paid\ndeepseek/deepseek-v3.1-terminus | paid\ndeepseek/deepseek-v3.1-terminus:exacto | paid\ndeepseek/deepseek-v3.2 | paid\ndeepseek/deepseek-v3.2-exp | paid\ndeepseek/deepseek-v3.2-speciale | paid\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1-0528 | paid\ndeepseek/deepseek-r1-0528:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-32b | paid\neleutherai/llemma_7b | paid\nessentialai/rnj-1-instruct | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.5-flash | paid\ngoogle/gemini-2.5-flash-image | paid\ngoogle/gemini-2.5-flash-image-preview | paid\ngoogle/gemini-2.5-flash-lite | paid\ngoogle/gemini-2.5-flash-lite-preview-09-2025 | paid\ngoogle/gemini-2.5-flash-preview-09-2025 | paid\ngoogle/gemini-2.5-pro | paid\ngoogle/gemini-2.5-pro-preview-05-06 | paid\ngoogle/gemini-2.5-pro-preview | paid\ngoogle/gemini-3-flash-preview | paid\ngoogle/gemini-3-pro-preview | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-3-12b-it | paid\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it | paid\ngoogle/gemma-3-4b-it:free 
| free\ngoogle/gemma-3n-e2b-it:free | free\ngoogle/gemma-3n-e4b-it | paid\ngoogle/gemma-3n-e4b-it:free | free\ngoogle/gemini-3-pro-image-preview | paid\nibm-granite/granite-4.0-h-micro | paid\ninception/mercury | paid\ninception/mercury-coder | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nkwaipilot/kat-coder-pro:free | free\nliquid/lfm-2.2-6b | paid\nliquid/lfm2-8b-a1b | paid\nmeta-llama/llama-guard-3-8b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeituan/longcat-flash-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-405b-instruct:free | free\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-4-maverick | paid\nmeta-llama/llama-4-scout | paid\nmeta-llama/llama-guard-4-12b | paid\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-4-reasoning-plus | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nminimax/minimax-m1 | paid\nminimax/minimax-m2 | paid\nminimax/minimax-m2.1 | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2508 | paid\nmistralai/devstral-2512 | paid\nmistralai/devstral-2512:free | free\nmistralai/devstral-medium | paid\nmistralai/devstral-small | paid\nmistralai/devstral-small-2505 | paid\nmistralai/ministral-14b-2512 | paid\nmistralai/ministral-3b-2512 | paid\nmistralai/ministral-8b-2512 | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-large-2512 | paid\nmistralai/mistral-medium-3 | paid\nmistralai/mistral-medium-3.1 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mistral-small-3.1-24b-instruct:free | free\nmistralai/mistral-small-3.2-24b-instruct | paid\nmistralai/mistral-small-creative | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmistralai/voxtral-small-24b-2507 | paid\nmoonshotai/kimi-dev-72b | paid\nmoonshotai/kimi-k2 | paid\nmoonshotai/kimi-k2:free | free\nmoonshotai/kimi-k2-0905 | paid\nmoonshotai/kimi-k2-0905:exacto | paid\nmoonshotai/kimi-k2-thinking | paid\nmorph/morph-v3-fast | paid\nmorph/morph-v3-large | paid\ngryphe/mythomax-l2-13b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nnex-agi/deepseek-v3.1-nex-n1:free | free\nneversleep/noromaid-20b | paid\nnousresearch/deephermes-3-mistral-24b-preview | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-405b:free | 
free\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-4-405b | paid\nnousresearch/hermes-4-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-ultra-253b-v1 | paid\nnvidia/llama-3.3-nemotron-super-49b-v1.5 | paid\nnvidia/nemotron-3-nano-30b-a3b | paid\nnvidia/nemotron-3-nano-30b-a3b:free | free\nnvidia/nemotron-nano-12b-v2-vl | paid\nnvidia/nemotron-nano-12b-v2-vl:free | free\nnvidia/nemotron-nano-9b-v2 | paid\nnvidia/nemotron-nano-9b-v2:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/codex-mini | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.1 | paid\nopenai/gpt-4.1-mini | paid\nopenai/gpt-4.1-nano | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-audio-preview | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/gpt-5 | paid\nopenai/gpt-5-chat | paid\nopenai/gpt-5-codex | paid\nopenai/gpt-5-image | paid\nopenai/gpt-5-image-mini | paid\nopenai/gpt-5-mini | paid\nopenai/gpt-5-nano | paid\nopenai/gpt-5-pro | paid\nopenai/gpt-5.1 | paid\nopenai/gpt-5.1-chat | paid\nopenai/gpt-5.1-codex | paid\nopenai/gpt-5.1-codex-max | paid\nopenai/gpt-5.1-codex-mini | paid\nopenai/gpt-5.2 | paid\nopenai/gpt-5.2-chat | paid\nopenai/gpt-5.2-pro | paid\nopenai/gpt-oss-120b | paid\nopenai/gpt-oss-120b:exacto | paid\nopenai/gpt-oss-120b:free | free\nopenai/gpt-oss-20b | paid\nopenai/gpt-oss-20b:free | free\nopenai/gpt-oss-safeguard-20b | paid\nopenai/o1 | paid\nopenai/o1-pro | paid\nopenai/o3 | paid\nopenai/o3-deep-research | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenai/o3-pro | paid\nopenai/o4-mini | paid\nopenai/o4-mini-deep-research | paid\nopenai/o4-mini-high | paid\nopengvlab/internvl3-78b | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-pro-search | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\nprime-intellect/intellect-3 | paid\nqwen/qwen-plus-2025-07-28 | paid\nqwen/qwen-plus-2025-07-28:thinking | paid\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen2.5-coder-7b-instruct | paid\nqwen/qwen2.5-vl-32b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct:free | free\nqwen/qwen3-14b | paid\nqwen/qwen3-235b-a22b | paid\nqwen/qwen3-235b-a22b-2507 | paid\nqwen/qwen3-235b-a22b-thinking-2507 | paid\nqwen/qwen3-30b-a3b | paid\nqwen/qwen3-30b-a3b-instruct-2507 | paid\nqwen/qwen3-30b-a3b-thinking-2507 | paid\nqwen/qwen3-32b | paid\nqwen/qwen3-4b:free | free\nqwen/qwen3-8b | paid\nqwen/qwen3-coder-30b-a3b-instruct | paid\nqwen/qwen3-coder | paid\nqwen/qwen3-coder:exacto | paid\nqwen/qwen3-coder:free | free\nqwen/qwen3-coder-flash | paid\nqwen/qwen3-coder-plus | paid\nqwen/qwen3-max | paid\nqwen/qwen3-next-80b-a3b-instruct | paid\nqwen/qwen3-next-80b-a3b-thinking | paid\nqwen/qwen3-vl-235b-a22b-instruct | paid\nqwen/qwen3-vl-235b-a22b-thinking | 
paid\nqwen/qwen3-vl-30b-a3b-instruct | paid\nqwen/qwen3-vl-30b-a3b-thinking | paid\nqwen/qwen3-vl-32b-instruct | paid\nqwen/qwen3-vl-8b-instruct | paid\nqwen/qwen3-vl-8b-thinking | paid\nqwen/qwq-32b | paid\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nrelace/relace-apply-3 | paid\nrelace/relace-search | paid\nundi95/remm-slerp-l2-13b | paid\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nstepfun-ai/step3 | paid\nswitchpoint/router | paid\ntencent/hunyuan-a13b-instruct | paid\nthedrummer/cydonia-24b-v4.1 | paid\nthedrummer/rocinante-12b | paid\nthedrummer/skyfall-36b-v2 | paid\nthedrummer/unslopnemo-12b | paid\nthudm/glm-4.1v-9b-thinking | paid\ntngtech/deepseek-r1t-chimera | paid\ntngtech/deepseek-r1t-chimera:free | free\ntngtech/deepseek-r1t2-chimera | paid\ntngtech/deepseek-r1t2-chimera:free | free\ntngtech/tng-r1t-chimera | paid\ntngtech/tng-r1t-chimera:free | free\nalibaba/tongyi-deepresearch-30b-a3b | paid\nalibaba/tongyi-deepresearch-30b-a3b:free | free\ncognitivecomputations/dolphin-mistral-24b-venice-edition:free | free\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-3 | paid\nx-ai/grok-3-beta | paid\nx-ai/grok-3-mini | paid\nx-ai/grok-3-mini-beta | paid\nx-ai/grok-4 | paid\nx-ai/grok-4-fast | paid\nx-ai/grok-4.1-fast | paid\nx-ai/grok-code-fast-1 | paid\nxiaomi/mimo-v2-flash:free | free\nz-ai/glm-4-32b | paid\nz-ai/glm-4.5 | paid\nz-ai/glm-4.5-air | paid\nz-ai/glm-4.5-air:free | free\nz-ai/glm-4.5v | paid\nz-ai/glm-4.6 | paid\nz-ai/glm-4.6:exacto | paid\nz-ai/glm-4.6v | paid\nz-ai/glm-4.7 | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\nchatgpt-image-latest\ncodex-mini-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.1\ngpt-4.1-2025-04-14\ngpt-4.1-mini\ngpt-4.1-mini-2025-04-14\ngpt-4.1-nano\ngpt-4.1-nano-2025-04-14\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-audio-preview-2025-06-03\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-mini-transcribe\ngpt-4o-mini-transcribe-2025-03-20\ngpt-4o-mini-transcribe-2025-12-15\ngpt-4o-mini-tts\ngpt-4o-mini-tts-2025-03-20\ngpt-4o-mini-tts-2025-12-15\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-realtime-preview-2025-06-03\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\ngpt-4o-transcribe\ngpt-4o-transcribe-diarize\ngpt-5\ngpt-5-2025-08-07\ngpt-5-chat-latest\ngpt-5-codex\ngpt-5-mini\ngpt-5-mini-2025-08-07\ngpt-5-nano\ngpt-5-nano-2025-08-07\ngpt-5-pro\ngpt-5-pro-2025-10-06\ngpt-5-search-api\ngpt-5-search-api-2025-10-14\ngpt-5.1\ngpt-5.1-2025-11-13\ngpt-5.1-chat-latest\ngpt-5.1-codex\ngpt-5.1-codex-max\ngpt-5.1-codex-mini\ngpt-5.2\ngpt-5.2-2025-12-11\ngpt-5.2-chat-latest\ngpt-5.2-pro\ngpt-5.2-pro-2025-12-11\ngpt-audio\ngpt-audio-2025-08-28\ngpt-audio-mini\ngpt-audio-mini-2025-10-06\ngpt-audio-mini-2025-12-
15\ngpt-image-1\ngpt-image-1-mini\ngpt-image-1.5\ngpt-realtime\ngpt-realtime-2025-08-28\ngpt-realtime-mini\ngpt-realtime-mini-2025-10-06\ngpt-realtime-mini-2025-12-15\no1\no1-2024-12-17\no1-pro\no1-pro-2025-03-19\no3\no3-2025-04-16\no3-mini\no3-mini-2025-01-31\no4-mini\no4-mini-2025-04-16\no4-mini-deep-research\no4-mini-deep-research-2025-06-26\nomni-moderation-2024-09-26\nomni-moderation-latest\nsora-2\nsora-2-pro\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n" }, "router": { "type": "string", @@ -161,7 +161,7 @@ }, "preferences": { "type": "string", - "default": "C:\\Users\\mc007\\.osr\\preferences.md", + "default": "C:\\Users\\zx\\.osr\\preferences.md", "description": "Path to preferences file, eg: location, your email address, gender, etc. Supports environment variables." }, "logs": { diff --git a/packages/kbot/schema_ui.json b/packages/kbot/schema_ui.json index 5facee16..13ae8252 100644 --- a/packages/kbot/schema_ui.json +++ b/packages/kbot/schema_ui.json @@ -79,7 +79,7 @@ "ui:title": "Api_key" }, "model": { - "ui:description": "AI model to use for processing. Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n01-ai/yi-large | paid\naetherwiing/mn-starcannon-12b | paid\nai21/jamba-1-5-large | paid\nai21/jamba-1-5-mini | paid\nai21/jamba-instruct | paid\njondurbin/airoboros-l2-70b | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-haiku:beta | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3-opus:beta | paid\nanthropic/claude-3-sonnet | paid\nanthropic/claude-3-sonnet:beta | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-haiku-20241022:beta | paid\nanthropic/claude-3.5-haiku:beta | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.5-sonnet-20240620 | paid\nanthropic/claude-3.5-sonnet-20240620:beta | paid\nanthropic/claude-3.5-sonnet:beta | paid\nanthropic/claude-2 | paid\nanthropic/claude-2:beta | paid\nanthropic/claude-2.0 | paid\nanthropic/claude-2.0:beta | paid\nanthropic/claude-2.1 | paid\nanthropic/claude-2.1:beta | paid\nopenrouter/auto | paid\ncohere/command | paid\ncohere/command-r | paid\ncohere/command-r-03-2024 | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus | paid\ncohere/command-r-plus-04-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndatabricks/dbrx-instruct | paid\ndeepseek/deepseek-chat-v2.5 | paid\ndeepseek/deepseek-chat | paid\ncognitivecomputations/dolphin-mixtral-8x7b | paid\ncognitivecomputations/dolphin-mixtral-8x22b | paid\neva-unit-01/eva-llama-3.33-70b | paid\neva-unit-01/eva-qwen-2.5-32b | paid\neva-unit-01/eva-qwen-2.5-72b | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-thinking-exp:free | free\ngoogle/gemini-exp-1114:free | free\ngoogle/gemini-exp-1121:free | free\ngoogle/gemini-exp-1206:free | free\ngoogle/gemini-flash-1.5 | paid\ngoogle/gemini-flash-1.5-8b | paid\ngoogle/gemini-flash-1.5-8b-exp | paid\ngoogle/gemini-flash-1.5-exp | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-pro | paid\ngoogle/gemini-pro-1.5 | 
paid\ngoogle/gemini-pro-1.5-exp | paid\ngoogle/gemini-pro-vision | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-2-9b-it:free | free\ngoogle/learnlm-1.5-pro-experimental:free | free\ngoogle/palm-2-chat-bison | paid\ngoogle/palm-2-chat-bison-32k | paid\ngoogle/palm-2-codechat-bison | paid\ngoogle/palm-2-codechat-bison-32k | paid\nhuggingfaceh4/zephyr-7b-beta:free | free\ninfermatic/mn-inferor-12b | paid\ninflatebot/mn-mag-mell-r1 | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nliquid/lfm-40b | paid\nlizpreciatior/lzlv-70b-fp16-hf | paid\nalpindale/magnum-72b | paid\nanthracite-org/magnum-v2-72b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeta-llama/llama-2-13b-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-70b-instruct:nitro | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3-8b-instruct:extended | paid\nmeta-llama/llama-3-8b-instruct:free | free\nmeta-llama/llama-3-8b-instruct:nitro | paid\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-405b-instruct:free | free\nmeta-llama/llama-3.1-405b-instruct:nitro | paid\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-70b-instruct:free | free\nmeta-llama/llama-3.1-70b-instruct:nitro | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct:free | free\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct:free | free\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-1b-instruct:free | free\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.2-90b-vision-instruct:free | free\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-medium-128k-instruct:free | free\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct:free | free\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nsophosympatheia/midnight-rose-70b | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-medium | paid\nnothingiisreal/mn-celeste-12b | paid\nmistralai/mistral-small | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-mamba | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct:nitro | paid\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-nemo | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/mixtral-8x7b-instruct:nitro | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\ngryphe/mythomax-l2-13b | paid\ngryphe/mythomax-l2-13b:extended | paid\ngryphe/mythomax-l2-13b:free | free\ngryphe/mythomax-l2-13b:nitro | paid\nneversleep/llama-3-lumimaid-70b | paid\nneversleep/llama-3-lumimaid-8b | paid\nneversleep/llama-3-lumimaid-8b:extended | paid\nneversleep/llama-3.1-lumimaid-70b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nneversleep/noromaid-20b | paid\nnousresearch/nous-hermes-llama2-13b | paid\nnousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid\nnousresearch/hermes-3-llama-3.1-405b | 
paid\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nopenai/chatgpt-4o-latest | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-0125 | paid\nopenai/gpt-3.5-turbo-1106 | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-32k | paid\nopenai/gpt-4-32k-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/o1 | paid\nopenai/o1-mini | paid\nopenai/o1-mini-2024-09-12 | paid\nopenai/o1-preview | paid\nopenai/o1-preview-2024-09-12 | paid\nopenchat/openchat-7b | paid\nopenchat/openchat-7b:free | free\nteknium/openhermes-2.5-mistral-7b | paid\nperplexity/llama-3.1-sonar-huge-128k-online | paid\nperplexity/llama-3.1-sonar-large-128k-chat | paid\nperplexity/llama-3.1-sonar-large-128k-online | paid\nperplexity/llama-3.1-sonar-small-128k-chat | paid\nperplexity/llama-3.1-sonar-small-128k-online | paid\nperplexity/llama-3-sonar-large-32k-chat | paid\nperplexity/llama-3-sonar-large-32k-online | paid\nperplexity/llama-3-sonar-small-32k-chat | paid\npygmalionai/mythalion-13b | paid\nqwen/qwen-2-72b-instruct | paid\nqwen/qwen-2-7b-instruct | paid\nqwen/qwen-2-7b-instruct:free | free\nqwen/qvq-72b-preview | paid\nqwen/qwq-32b-preview | paid\nqwen/qwen-2-vl-72b-instruct | paid\nqwen/qwen-2-vl-7b-instruct | paid\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nundi95/remm-slerp-l2-13b | paid\nundi95/remm-slerp-l2-13b:extended | paid\nthedrummer/rocinante-12b | paid\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nundi95/toppy-m-7b | paid\nundi95/toppy-m-7b:free | free\nundi95/toppy-m-7b:nitro | paid\nthedrummer/unslopnemo-12b | paid\nmicrosoft/wizardlm-2-7b | paid\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-2-1212 | paid\nx-ai/grok-2-vision-1212 | paid\nx-ai/grok-beta | paid\nx-ai/grok-vision-beta | paid\nxwin-lm/xwin-lm-70b | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI 
models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-1106-vision-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4-vision-preview\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-10-01\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-10-01\ngpt-4o-realtime-preview-2024-12-17\no1-mini\no1-mini-2024-09-12\no1-preview\no1-preview-2024-09-12\nomni-moderation-2024-09-26\nomni-moderation-latest\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n", + "ui:description": "AI model to use for processing. Available models:\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenRouter models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nai21/jamba-large-1.7 | paid\nai21/jamba-mini-1.7 | paid\naion-labs/aion-1.0 | paid\naion-labs/aion-1.0-mini | paid\naion-labs/aion-rp-llama-3.1-8b | paid\nalfredpros/codellama-7b-instruct-solidity | paid\nallenai/olmo-2-0325-32b-instruct | paid\nallenai/olmo-3-32b-think:free | free\nallenai/olmo-3-7b-instruct | paid\nallenai/olmo-3-7b-think | paid\nallenai/olmo-3.1-32b-think:free | free\namazon/nova-2-lite-v1 | paid\namazon/nova-lite-v1 | paid\namazon/nova-micro-v1 | paid\namazon/nova-premier-v1 | paid\namazon/nova-pro-v1 | paid\nanthropic/claude-3-haiku | paid\nanthropic/claude-3-opus | paid\nanthropic/claude-3.5-haiku | paid\nanthropic/claude-3.5-haiku-20241022 | paid\nanthropic/claude-3.5-sonnet | paid\nanthropic/claude-3.7-sonnet | paid\nanthropic/claude-3.7-sonnet:thinking | paid\nanthropic/claude-haiku-4.5 | paid\nanthropic/claude-opus-4 | paid\nanthropic/claude-opus-4.1 | paid\nanthropic/claude-opus-4.5 | paid\nanthropic/claude-sonnet-4 | paid\nanthropic/claude-sonnet-4.5 | paid\narcee-ai/coder-large | paid\narcee-ai/maestro-reasoning | paid\narcee-ai/spotlight | paid\narcee-ai/trinity-mini | paid\narcee-ai/trinity-mini:free | free\narcee-ai/virtuoso-large | paid\narliai/qwq-32b-arliai-rpr-v1 | paid\nopenrouter/auto | paid\nbaidu/ernie-4.5-21b-a3b | paid\nbaidu/ernie-4.5-21b-a3b-thinking | paid\nbaidu/ernie-4.5-300b-a47b | paid\nbaidu/ernie-4.5-vl-28b-a3b | paid\nbaidu/ernie-4.5-vl-424b-a47b | paid\nopenrouter/bodybuilder | paid\nbytedance-seed/seed-1.6 | paid\nbytedance-seed/seed-1.6-flash | paid\nbytedance/ui-tars-1.5-7b | paid\ndeepcogito/cogito-v2-preview-llama-109b-moe | paid\ncohere/command-a | paid\ncohere/command-r-08-2024 | paid\ncohere/command-r-plus-08-2024 | paid\ncohere/command-r7b-12-2024 | paid\ndeepcogito/cogito-v2-preview-llama-405b | paid\ndeepcogito/cogito-v2-preview-llama-70b | paid\ndeepcogito/cogito-v2.1-671b | paid\ndeepseek/deepseek-prover-v2 | paid\ndeepseek/deepseek-r1-0528-qwen3-8b | paid\ndeepseek/deepseek-chat | paid\ndeepseek/deepseek-chat-v3-0324 | 
paid\ndeepseek/deepseek-chat-v3.1 | paid\ndeepseek/deepseek-v3.1-terminus | paid\ndeepseek/deepseek-v3.1-terminus:exacto | paid\ndeepseek/deepseek-v3.2 | paid\ndeepseek/deepseek-v3.2-exp | paid\ndeepseek/deepseek-v3.2-speciale | paid\ndeepseek/deepseek-r1 | paid\ndeepseek/deepseek-r1-0528 | paid\ndeepseek/deepseek-r1-0528:free | free\ndeepseek/deepseek-r1-distill-llama-70b | paid\ndeepseek/deepseek-r1-distill-qwen-14b | paid\ndeepseek/deepseek-r1-distill-qwen-32b | paid\neleutherai/llemma_7b | paid\nessentialai/rnj-1-instruct | paid\nalpindale/goliath-120b | paid\ngoogle/gemini-2.0-flash-001 | paid\ngoogle/gemini-2.0-flash-exp:free | free\ngoogle/gemini-2.0-flash-lite-001 | paid\ngoogle/gemini-2.5-flash | paid\ngoogle/gemini-2.5-flash-image | paid\ngoogle/gemini-2.5-flash-image-preview | paid\ngoogle/gemini-2.5-flash-lite | paid\ngoogle/gemini-2.5-flash-lite-preview-09-2025 | paid\ngoogle/gemini-2.5-flash-preview-09-2025 | paid\ngoogle/gemini-2.5-pro | paid\ngoogle/gemini-2.5-pro-preview-05-06 | paid\ngoogle/gemini-2.5-pro-preview | paid\ngoogle/gemini-3-flash-preview | paid\ngoogle/gemini-3-pro-preview | paid\ngoogle/gemma-2-27b-it | paid\ngoogle/gemma-2-9b-it | paid\ngoogle/gemma-3-12b-it | paid\ngoogle/gemma-3-12b-it:free | free\ngoogle/gemma-3-27b-it | paid\ngoogle/gemma-3-27b-it:free | free\ngoogle/gemma-3-4b-it | paid\ngoogle/gemma-3-4b-it:free | free\ngoogle/gemma-3n-e2b-it:free | free\ngoogle/gemma-3n-e4b-it | paid\ngoogle/gemma-3n-e4b-it:free | free\ngoogle/gemini-3-pro-image-preview | paid\nibm-granite/granite-4.0-h-micro | paid\ninception/mercury | paid\ninception/mercury-coder | paid\ninflection/inflection-3-pi | paid\ninflection/inflection-3-productivity | paid\nkwaipilot/kat-coder-pro:free | free\nliquid/lfm-2.2-6b | paid\nliquid/lfm2-8b-a1b | paid\nmeta-llama/llama-guard-3-8b | paid\nanthracite-org/magnum-v4-72b | paid\nmancer/weaver | paid\nmeituan/longcat-flash-chat | paid\nmeta-llama/llama-3-70b-instruct | paid\nmeta-llama/llama-3-8b-instruct | paid\nmeta-llama/llama-3.1-405b | paid\nmeta-llama/llama-3.1-405b-instruct | paid\nmeta-llama/llama-3.1-405b-instruct:free | free\nmeta-llama/llama-3.1-70b-instruct | paid\nmeta-llama/llama-3.1-8b-instruct | paid\nmeta-llama/llama-3.2-11b-vision-instruct | paid\nmeta-llama/llama-3.2-1b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct | paid\nmeta-llama/llama-3.2-3b-instruct:free | free\nmeta-llama/llama-3.2-90b-vision-instruct | paid\nmeta-llama/llama-3.3-70b-instruct | paid\nmeta-llama/llama-3.3-70b-instruct:free | free\nmeta-llama/llama-4-maverick | paid\nmeta-llama/llama-4-scout | paid\nmeta-llama/llama-guard-4-12b | paid\nmeta-llama/llama-guard-2-8b | paid\nmicrosoft/phi-4 | paid\nmicrosoft/phi-4-multimodal-instruct | paid\nmicrosoft/phi-4-reasoning-plus | paid\nmicrosoft/phi-3-medium-128k-instruct | paid\nmicrosoft/phi-3-mini-128k-instruct | paid\nmicrosoft/phi-3.5-mini-128k-instruct | paid\nminimax/minimax-m1 | paid\nminimax/minimax-m2 | paid\nminimax/minimax-m2.1 | paid\nminimax/minimax-01 | paid\nmistralai/mistral-large | paid\nmistralai/mistral-large-2407 | paid\nmistralai/mistral-large-2411 | paid\nmistralai/mistral-tiny | paid\nmistralai/codestral-2508 | paid\nmistralai/devstral-2512 | paid\nmistralai/devstral-2512:free | free\nmistralai/devstral-medium | paid\nmistralai/devstral-small | paid\nmistralai/devstral-small-2505 | paid\nmistralai/ministral-14b-2512 | paid\nmistralai/ministral-3b-2512 | paid\nmistralai/ministral-8b-2512 | paid\nmistralai/ministral-3b | paid\nmistralai/ministral-8b | 
paid\nmistralai/mistral-7b-instruct | paid\nmistralai/mistral-7b-instruct:free | free\nmistralai/mistral-7b-instruct-v0.1 | paid\nmistralai/mistral-7b-instruct-v0.2 | paid\nmistralai/mistral-7b-instruct-v0.3 | paid\nmistralai/mistral-large-2512 | paid\nmistralai/mistral-medium-3 | paid\nmistralai/mistral-medium-3.1 | paid\nmistralai/mistral-nemo | paid\nmistralai/mistral-small-24b-instruct-2501 | paid\nmistralai/mistral-small-3.1-24b-instruct | paid\nmistralai/mistral-small-3.1-24b-instruct:free | free\nmistralai/mistral-small-3.2-24b-instruct | paid\nmistralai/mistral-small-creative | paid\nmistralai/mixtral-8x22b-instruct | paid\nmistralai/mixtral-8x7b-instruct | paid\nmistralai/pixtral-12b | paid\nmistralai/pixtral-large-2411 | paid\nmistralai/mistral-saba | paid\nmistralai/voxtral-small-24b-2507 | paid\nmoonshotai/kimi-dev-72b | paid\nmoonshotai/kimi-k2 | paid\nmoonshotai/kimi-k2:free | free\nmoonshotai/kimi-k2-0905 | paid\nmoonshotai/kimi-k2-0905:exacto | paid\nmoonshotai/kimi-k2-thinking | paid\nmorph/morph-v3-fast | paid\nmorph/morph-v3-large | paid\ngryphe/mythomax-l2-13b | paid\nneversleep/llama-3.1-lumimaid-8b | paid\nnex-agi/deepseek-v3.1-nex-n1:free | free\nneversleep/noromaid-20b | paid\nnousresearch/deephermes-3-mistral-24b-preview | paid\nnousresearch/hermes-3-llama-3.1-405b | paid\nnousresearch/hermes-3-llama-3.1-405b:free | free\nnousresearch/hermes-3-llama-3.1-70b | paid\nnousresearch/hermes-4-405b | paid\nnousresearch/hermes-4-70b | paid\nnousresearch/hermes-2-pro-llama-3-8b | paid\nnvidia/llama-3.1-nemotron-70b-instruct | paid\nnvidia/llama-3.1-nemotron-ultra-253b-v1 | paid\nnvidia/llama-3.3-nemotron-super-49b-v1.5 | paid\nnvidia/nemotron-3-nano-30b-a3b | paid\nnvidia/nemotron-3-nano-30b-a3b:free | free\nnvidia/nemotron-nano-12b-v2-vl | paid\nnvidia/nemotron-nano-12b-v2-vl:free | free\nnvidia/nemotron-nano-9b-v2 | paid\nnvidia/nemotron-nano-9b-v2:free | free\nopenai/chatgpt-4o-latest | paid\nopenai/codex-mini | paid\nopenai/gpt-3.5-turbo | paid\nopenai/gpt-3.5-turbo-0613 | paid\nopenai/gpt-3.5-turbo-16k | paid\nopenai/gpt-3.5-turbo-instruct | paid\nopenai/gpt-4 | paid\nopenai/gpt-4-0314 | paid\nopenai/gpt-4-turbo | paid\nopenai/gpt-4-1106-preview | paid\nopenai/gpt-4-turbo-preview | paid\nopenai/gpt-4.1 | paid\nopenai/gpt-4.1-mini | paid\nopenai/gpt-4.1-nano | paid\nopenai/gpt-4o | paid\nopenai/gpt-4o-2024-05-13 | paid\nopenai/gpt-4o-2024-08-06 | paid\nopenai/gpt-4o-2024-11-20 | paid\nopenai/gpt-4o:extended | paid\nopenai/gpt-4o-audio-preview | paid\nopenai/gpt-4o-search-preview | paid\nopenai/gpt-4o-mini | paid\nopenai/gpt-4o-mini-2024-07-18 | paid\nopenai/gpt-4o-mini-search-preview | paid\nopenai/gpt-5 | paid\nopenai/gpt-5-chat | paid\nopenai/gpt-5-codex | paid\nopenai/gpt-5-image | paid\nopenai/gpt-5-image-mini | paid\nopenai/gpt-5-mini | paid\nopenai/gpt-5-nano | paid\nopenai/gpt-5-pro | paid\nopenai/gpt-5.1 | paid\nopenai/gpt-5.1-chat | paid\nopenai/gpt-5.1-codex | paid\nopenai/gpt-5.1-codex-max | paid\nopenai/gpt-5.1-codex-mini | paid\nopenai/gpt-5.2 | paid\nopenai/gpt-5.2-chat | paid\nopenai/gpt-5.2-pro | paid\nopenai/gpt-oss-120b | paid\nopenai/gpt-oss-120b:exacto | paid\nopenai/gpt-oss-120b:free | free\nopenai/gpt-oss-20b | paid\nopenai/gpt-oss-20b:free | free\nopenai/gpt-oss-safeguard-20b | paid\nopenai/o1 | paid\nopenai/o1-pro | paid\nopenai/o3 | paid\nopenai/o3-deep-research | paid\nopenai/o3-mini | paid\nopenai/o3-mini-high | paid\nopenai/o3-pro | paid\nopenai/o4-mini | paid\nopenai/o4-mini-deep-research | paid\nopenai/o4-mini-high | 
paid\nopengvlab/internvl3-78b | paid\nperplexity/sonar | paid\nperplexity/sonar-deep-research | paid\nperplexity/sonar-pro | paid\nperplexity/sonar-pro-search | paid\nperplexity/sonar-reasoning | paid\nperplexity/sonar-reasoning-pro | paid\nprime-intellect/intellect-3 | paid\nqwen/qwen-plus-2025-07-28 | paid\nqwen/qwen-plus-2025-07-28:thinking | paid\nqwen/qwen-vl-max | paid\nqwen/qwen-vl-plus | paid\nqwen/qwen-max | paid\nqwen/qwen-plus | paid\nqwen/qwen-turbo | paid\nqwen/qwen-2.5-7b-instruct | paid\nqwen/qwen2.5-coder-7b-instruct | paid\nqwen/qwen2.5-vl-32b-instruct | paid\nqwen/qwen2.5-vl-72b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct | paid\nqwen/qwen-2.5-vl-7b-instruct:free | free\nqwen/qwen3-14b | paid\nqwen/qwen3-235b-a22b | paid\nqwen/qwen3-235b-a22b-2507 | paid\nqwen/qwen3-235b-a22b-thinking-2507 | paid\nqwen/qwen3-30b-a3b | paid\nqwen/qwen3-30b-a3b-instruct-2507 | paid\nqwen/qwen3-30b-a3b-thinking-2507 | paid\nqwen/qwen3-32b | paid\nqwen/qwen3-4b:free | free\nqwen/qwen3-8b | paid\nqwen/qwen3-coder-30b-a3b-instruct | paid\nqwen/qwen3-coder | paid\nqwen/qwen3-coder:exacto | paid\nqwen/qwen3-coder:free | free\nqwen/qwen3-coder-flash | paid\nqwen/qwen3-coder-plus | paid\nqwen/qwen3-max | paid\nqwen/qwen3-next-80b-a3b-instruct | paid\nqwen/qwen3-next-80b-a3b-thinking | paid\nqwen/qwen3-vl-235b-a22b-instruct | paid\nqwen/qwen3-vl-235b-a22b-thinking | paid\nqwen/qwen3-vl-30b-a3b-instruct | paid\nqwen/qwen3-vl-30b-a3b-thinking | paid\nqwen/qwen3-vl-32b-instruct | paid\nqwen/qwen3-vl-8b-instruct | paid\nqwen/qwen3-vl-8b-thinking | paid\nqwen/qwq-32b | paid\nqwen/qwen-2.5-72b-instruct | paid\nqwen/qwen-2.5-coder-32b-instruct | paid\nrelace/relace-apply-3 | paid\nrelace/relace-search | paid\nundi95/remm-slerp-l2-13b | paid\nsao10k/l3-lunaris-8b | paid\nsao10k/l3-euryale-70b | paid\nsao10k/l3.1-70b-hanami-x1 | paid\nsao10k/l3.1-euryale-70b | paid\nsao10k/l3.3-euryale-70b | paid\nraifle/sorcererlm-8x22b | paid\nstepfun-ai/step3 | paid\nswitchpoint/router | paid\ntencent/hunyuan-a13b-instruct | paid\nthedrummer/cydonia-24b-v4.1 | paid\nthedrummer/rocinante-12b | paid\nthedrummer/skyfall-36b-v2 | paid\nthedrummer/unslopnemo-12b | paid\nthudm/glm-4.1v-9b-thinking | paid\ntngtech/deepseek-r1t-chimera | paid\ntngtech/deepseek-r1t-chimera:free | free\ntngtech/deepseek-r1t2-chimera | paid\ntngtech/deepseek-r1t2-chimera:free | free\ntngtech/tng-r1t-chimera | paid\ntngtech/tng-r1t-chimera:free | free\nalibaba/tongyi-deepresearch-30b-a3b | paid\nalibaba/tongyi-deepresearch-30b-a3b:free | free\ncognitivecomputations/dolphin-mistral-24b-venice-edition:free | free\nmicrosoft/wizardlm-2-8x22b | paid\nx-ai/grok-3 | paid\nx-ai/grok-3-beta | paid\nx-ai/grok-3-mini | paid\nx-ai/grok-3-mini-beta | paid\nx-ai/grok-4 | paid\nx-ai/grok-4-fast | paid\nx-ai/grok-4.1-fast | paid\nx-ai/grok-code-fast-1 | paid\nxiaomi/mimo-v2-flash:free | free\nz-ai/glm-4-32b | paid\nz-ai/glm-4.5 | paid\nz-ai/glm-4.5-air | paid\nz-ai/glm-4.5-air:free | free\nz-ai/glm-4.5v | paid\nz-ai/glm-4.6 | paid\nz-ai/glm-4.6:exacto | paid\nz-ai/glm-4.6v | paid\nz-ai/glm-4.7 | paid\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m OpenAI 
models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\nbabbage-002\nchatgpt-4o-latest\nchatgpt-image-latest\ncodex-mini-latest\ndall-e-2\ndall-e-3\ndavinci-002\ngpt-3.5-turbo\ngpt-3.5-turbo-0125\ngpt-3.5-turbo-1106\ngpt-3.5-turbo-16k\ngpt-3.5-turbo-instruct\ngpt-3.5-turbo-instruct-0914\ngpt-4\ngpt-4-0125-preview\ngpt-4-0613\ngpt-4-1106-preview\ngpt-4-turbo\ngpt-4-turbo-2024-04-09\ngpt-4-turbo-preview\ngpt-4.1\ngpt-4.1-2025-04-14\ngpt-4.1-mini\ngpt-4.1-mini-2025-04-14\ngpt-4.1-nano\ngpt-4.1-nano-2025-04-14\ngpt-4o\ngpt-4o-2024-05-13\ngpt-4o-2024-08-06\ngpt-4o-2024-11-20\ngpt-4o-audio-preview\ngpt-4o-audio-preview-2024-12-17\ngpt-4o-audio-preview-2025-06-03\ngpt-4o-mini\ngpt-4o-mini-2024-07-18\ngpt-4o-mini-audio-preview\ngpt-4o-mini-audio-preview-2024-12-17\ngpt-4o-mini-realtime-preview\ngpt-4o-mini-realtime-preview-2024-12-17\ngpt-4o-mini-search-preview\ngpt-4o-mini-search-preview-2025-03-11\ngpt-4o-mini-transcribe\ngpt-4o-mini-transcribe-2025-03-20\ngpt-4o-mini-transcribe-2025-12-15\ngpt-4o-mini-tts\ngpt-4o-mini-tts-2025-03-20\ngpt-4o-mini-tts-2025-12-15\ngpt-4o-realtime-preview\ngpt-4o-realtime-preview-2024-12-17\ngpt-4o-realtime-preview-2025-06-03\ngpt-4o-search-preview\ngpt-4o-search-preview-2025-03-11\ngpt-4o-transcribe\ngpt-4o-transcribe-diarize\ngpt-5\ngpt-5-2025-08-07\ngpt-5-chat-latest\ngpt-5-codex\ngpt-5-mini\ngpt-5-mini-2025-08-07\ngpt-5-nano\ngpt-5-nano-2025-08-07\ngpt-5-pro\ngpt-5-pro-2025-10-06\ngpt-5-search-api\ngpt-5-search-api-2025-10-14\ngpt-5.1\ngpt-5.1-2025-11-13\ngpt-5.1-chat-latest\ngpt-5.1-codex\ngpt-5.1-codex-max\ngpt-5.1-codex-mini\ngpt-5.2\ngpt-5.2-2025-12-11\ngpt-5.2-chat-latest\ngpt-5.2-pro\ngpt-5.2-pro-2025-12-11\ngpt-audio\ngpt-audio-2025-08-28\ngpt-audio-mini\ngpt-audio-mini-2025-10-06\ngpt-audio-mini-2025-12-15\ngpt-image-1\ngpt-image-1-mini\ngpt-image-1.5\ngpt-realtime\ngpt-realtime-2025-08-28\ngpt-realtime-mini\ngpt-realtime-mini-2025-10-06\ngpt-realtime-mini-2025-12-15\no1\no1-2024-12-17\no1-pro\no1-pro-2025-03-19\no3\no3-2025-04-16\no3-mini\no3-mini-2025-01-31\no4-mini\no4-mini-2025-04-16\no4-mini-deep-research\no4-mini-deep-research-2025-06-26\nomni-moderation-2024-09-26\nomni-moderation-latest\nsora-2\nsora-2-pro\ntext-embedding-3-large\ntext-embedding-3-small\ntext-embedding-ada-002\ntts-1\ntts-1-1106\ntts-1-hd\ntts-1-hd-1106\nwhisper-1\n-----\n\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m Deepseek models:\u001b[22m\u001b[39m\n\u001b[35m\u001b[1m\u001b[22m\u001b[39m\ndeepseek-chat\ndeepseek-reasoner\n-----\n", "ui:title": "Model" }, "router": { @@ -116,7 +116,7 @@ "preferences": { "ui:description": "Path to preferences file, eg: location, your email address, gender, etc. 
Supports environment variables.", "ui:title": "Preferences", - "ui:placeholder": "C:\\Users\\mc007\\.osr\\preferences.md" + "ui:placeholder": "C:\\Users\\zx\\.osr\\preferences.md" }, "logs": { "ui:description": "Logging directory", diff --git a/packages/kbot/src/models/cache/openrouter-models-free.ts b/packages/kbot/src/models/cache/openrouter-models-free.ts index c963e887..fa3b8da3 100644 --- a/packages/kbot/src/models/cache/openrouter-models-free.ts +++ b/packages/kbot/src/models/cache/openrouter-models-free.ts @@ -1,22 +1,3 @@ export enum E_OPENROUTER_MODEL_FREE { - MODEL_FREE_ALLENAI_MOLMO_2_8B_FREE = "allenai/molmo-2-8b:free", - MODEL_FREE_XIAOMI_MIMO_V2_FLASH_FREE = "xiaomi/mimo-v2-flash:free", - MODEL_FREE_NVIDIA_NEMOTRON_3_NANO_30B_A3B_FREE = "nvidia/nemotron-3-nano-30b-a3b:free", - MODEL_FREE_ARCEE_AI_TRINITY_MINI_FREE = "arcee-ai/trinity-mini:free", - MODEL_FREE_TNGTECH_TNG_R1T_CHIMERA_FREE = "tngtech/tng-r1t-chimera:free", - MODEL_FREE_NVIDIA_NEMOTRON_NANO_12B_V2_VL_FREE = "nvidia/nemotron-nano-12b-v2-vl:free", - MODEL_FREE_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT_FREE = "qwen/qwen3-next-80b-a3b-instruct:free", - MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free", - MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free", - MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free", - MODEL_FREE_Z_AI_GLM_4_5_AIR_FREE = "z-ai/glm-4.5-air:free", - MODEL_FREE_QWEN_QWEN3_CODER_FREE = "qwen/qwen3-coder:free", - MODEL_FREE_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free", - MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN_MISTRAL_24B_VENICE_EDITION_FREE = "cognitivecomputations/dolphin-mistral-24b-venice-edition:free", - MODEL_FREE_TNGTECH_DEEPSEEK_R1T2_CHIMERA_FREE = "tngtech/deepseek-r1t2-chimera:free", - MODEL_FREE_QWEN_QWEN3_4B_FREE = "qwen/qwen3-4b:free", - MODEL_FREE_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE = "tngtech/deepseek-r1t-chimera:free", - MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", - MODEL_FREE_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", - MODEL_FREE_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B_FREE = "nousresearch/hermes-3-llama-3.1-405b:free" + } \ No newline at end of file diff --git a/packages/kbot/src/models/cache/openrouter-models.ts b/packages/kbot/src/models/cache/openrouter-models.ts index 5941840f..f2dbf1a2 100644 --- a/packages/kbot/src/models/cache/openrouter-models.ts +++ b/packages/kbot/src/models/cache/openrouter-models.ts @@ -1,4 +1,10 @@ export enum E_OPENROUTER_MODEL { + MODEL_QWEN_QWEN3_CODER_NEXT = "qwen/qwen3-coder-next", + MODEL_OPENROUTER_FREE = "openrouter/free", + MODEL_STEPFUN_STEP_3_5_FLASH_FREE = "stepfun/step-3.5-flash:free", + MODEL_ARCEE_AI_TRINITY_LARGE_PREVIEW_FREE = "arcee-ai/trinity-large-preview:free", + MODEL_MOONSHOTAI_KIMI_K2_5 = "moonshotai/kimi-k2.5", + MODEL_UPSTAGE_SOLAR_PRO_3_FREE = "upstage/solar-pro-3:free", MODEL_MINIMAX_MINIMAX_M2_HER = "minimax/minimax-m2-her", MODEL_WRITER_PALMYRA_X5 = "writer/palmyra-x5", MODEL_LIQUID_LFM_2_5_1_2B_THINKING_FREE = "liquid/lfm-2.5-1.2b-thinking:free", @@ -16,14 +22,12 @@ export enum E_OPENROUTER_MODEL { MODEL_GOOGLE_GEMINI_3_FLASH_PREVIEW = "google/gemini-3-flash-preview", MODEL_MISTRALAI_MISTRAL_SMALL_CREATIVE = "mistralai/mistral-small-creative", MODEL_ALLENAI_OLMO_3_1_32B_THINK = "allenai/olmo-3.1-32b-think", - MODEL_XIAOMI_MIMO_V2_FLASH_FREE = "xiaomi/mimo-v2-flash:free", MODEL_XIAOMI_MIMO_V2_FLASH = "xiaomi/mimo-v2-flash", MODEL_NVIDIA_NEMOTRON_3_NANO_30B_A3B_FREE = 
"nvidia/nemotron-3-nano-30b-a3b:free", MODEL_NVIDIA_NEMOTRON_3_NANO_30B_A3B = "nvidia/nemotron-3-nano-30b-a3b", MODEL_OPENAI_GPT_5_2_CHAT = "openai/gpt-5.2-chat", MODEL_OPENAI_GPT_5_2_PRO = "openai/gpt-5.2-pro", MODEL_OPENAI_GPT_5_2 = "openai/gpt-5.2", - MODEL_MISTRALAI_DEVSTRAL_2512_FREE = "mistralai/devstral-2512:free", MODEL_MISTRALAI_DEVSTRAL_2512 = "mistralai/devstral-2512", MODEL_RELACE_RELACE_SEARCH = "relace/relace-search", MODEL_Z_AI_GLM_4_6V = "z-ai/glm-4.6v", @@ -151,7 +155,6 @@ export enum E_OPENROUTER_MODEL { MODEL_GOOGLE_GEMINI_2_5_FLASH_LITE = "google/gemini-2.5-flash-lite", MODEL_QWEN_QWEN3_235B_A22B_2507 = "qwen/qwen3-235b-a22b-2507", MODEL_SWITCHPOINT_ROUTER = "switchpoint/router", - MODEL_MOONSHOTAI_KIMI_K2_FREE = "moonshotai/kimi-k2:free", MODEL_MOONSHOTAI_KIMI_K2 = "moonshotai/kimi-k2", MODEL_MISTRALAI_DEVSTRAL_MEDIUM = "mistralai/devstral-medium", MODEL_MISTRALAI_DEVSTRAL_SMALL = "mistralai/devstral-small", @@ -261,7 +264,6 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b", MODEL_OPENAI_O1 = "openai/o1", MODEL_COHERE_COMMAND_R7B_12_2024 = "cohere/command-r7b-12-2024", - MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free", MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT = "meta-llama/llama-3.3-70b-instruct", MODEL_AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1", @@ -277,25 +279,24 @@ export enum E_OPENROUTER_MODEL { MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", - MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", + MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", - MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", - MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", - MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT_FREE = "qwen/qwen-2.5-vl-7b-instruct:free", - MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", + MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", + MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b", MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B_FREE = "nousresearch/hermes-3-llama-3.1-405b:free", 
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b", @@ -303,25 +304,24 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", - MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", - MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free", - MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", - MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", + MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it", MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it", MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", + MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", MODEL_OPENAI_GPT_4O = "openai/gpt-4o", MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended", - MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", - MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct", MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct", + MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct", MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct", MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b", MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", diff --git a/packages/kbot/src/zod_types.ts b/packages/kbot/src/zod_types.ts index c42a734b..569d8a53 100644 --- a/packages/kbot/src/zod_types.ts +++ b/packages/kbot/src/zod_types.ts @@ -31,228 +31,366 @@ export interface IKBotOptions {   OpenRouter models:  - 01-ai/yi-large | paid - aetherwiing/mn-starcannon-12b | paid - ai21/jamba-1-5-large | paid - ai21/jamba-1-5-mini | paid - ai21/jamba-instruct | paid - jondurbin/airoboros-l2-70b | paid + ai21/jamba-large-1.7 | paid + ai21/jamba-mini-1.7 | paid + aion-labs/aion-1.0 | paid + aion-labs/aion-1.0-mini | paid + aion-labs/aion-rp-llama-3.1-8b | paid + alfredpros/codellama-7b-instruct-solidity | paid + allenai/olmo-2-0325-32b-instruct | paid + allenai/olmo-3-32b-think:free | free + allenai/olmo-3-7b-instruct | paid + allenai/olmo-3-7b-think | paid + allenai/olmo-3.1-32b-think:free | free + amazon/nova-2-lite-v1 | paid amazon/nova-lite-v1 | paid amazon/nova-micro-v1 | paid + amazon/nova-premier-v1 | paid amazon/nova-pro-v1 | paid anthropic/claude-3-haiku | paid - anthropic/claude-3-haiku:beta | paid anthropic/claude-3-opus | paid - anthropic/claude-3-opus:beta | paid - anthropic/claude-3-sonnet | paid - anthropic/claude-3-sonnet:beta | paid anthropic/claude-3.5-haiku | paid anthropic/claude-3.5-haiku-20241022 | paid - anthropic/claude-3.5-haiku-20241022:beta | 
paid - anthropic/claude-3.5-haiku:beta | paid anthropic/claude-3.5-sonnet | paid - anthropic/claude-3.5-sonnet-20240620 | paid - anthropic/claude-3.5-sonnet-20240620:beta | paid - anthropic/claude-3.5-sonnet:beta | paid - anthropic/claude-2 | paid - anthropic/claude-2:beta | paid - anthropic/claude-2.0 | paid - anthropic/claude-2.0:beta | paid - anthropic/claude-2.1 | paid - anthropic/claude-2.1:beta | paid + anthropic/claude-3.7-sonnet | paid + anthropic/claude-3.7-sonnet:thinking | paid + anthropic/claude-haiku-4.5 | paid + anthropic/claude-opus-4 | paid + anthropic/claude-opus-4.1 | paid + anthropic/claude-opus-4.5 | paid + anthropic/claude-sonnet-4 | paid + anthropic/claude-sonnet-4.5 | paid + arcee-ai/coder-large | paid + arcee-ai/maestro-reasoning | paid + arcee-ai/spotlight | paid + arcee-ai/trinity-mini | paid + arcee-ai/trinity-mini:free | free + arcee-ai/virtuoso-large | paid + arliai/qwq-32b-arliai-rpr-v1 | paid openrouter/auto | paid - cohere/command | paid - cohere/command-r | paid - cohere/command-r-03-2024 | paid + baidu/ernie-4.5-21b-a3b | paid + baidu/ernie-4.5-21b-a3b-thinking | paid + baidu/ernie-4.5-300b-a47b | paid + baidu/ernie-4.5-vl-28b-a3b | paid + baidu/ernie-4.5-vl-424b-a47b | paid + openrouter/bodybuilder | paid + bytedance-seed/seed-1.6 | paid + bytedance-seed/seed-1.6-flash | paid + bytedance/ui-tars-1.5-7b | paid + deepcogito/cogito-v2-preview-llama-109b-moe | paid + cohere/command-a | paid cohere/command-r-08-2024 | paid - cohere/command-r-plus | paid - cohere/command-r-plus-04-2024 | paid cohere/command-r-plus-08-2024 | paid cohere/command-r7b-12-2024 | paid - databricks/dbrx-instruct | paid - deepseek/deepseek-chat-v2.5 | paid + deepcogito/cogito-v2-preview-llama-405b | paid + deepcogito/cogito-v2-preview-llama-70b | paid + deepcogito/cogito-v2.1-671b | paid + deepseek/deepseek-prover-v2 | paid + deepseek/deepseek-r1-0528-qwen3-8b | paid deepseek/deepseek-chat | paid - cognitivecomputations/dolphin-mixtral-8x7b | paid - cognitivecomputations/dolphin-mixtral-8x22b | paid - eva-unit-01/eva-llama-3.33-70b | paid - eva-unit-01/eva-qwen-2.5-32b | paid - eva-unit-01/eva-qwen-2.5-72b | paid + deepseek/deepseek-chat-v3-0324 | paid + deepseek/deepseek-chat-v3.1 | paid + deepseek/deepseek-v3.1-terminus | paid + deepseek/deepseek-v3.1-terminus:exacto | paid + deepseek/deepseek-v3.2 | paid + deepseek/deepseek-v3.2-exp | paid + deepseek/deepseek-v3.2-speciale | paid + deepseek/deepseek-r1 | paid + deepseek/deepseek-r1-0528 | paid + deepseek/deepseek-r1-0528:free | free + deepseek/deepseek-r1-distill-llama-70b | paid + deepseek/deepseek-r1-distill-qwen-14b | paid + deepseek/deepseek-r1-distill-qwen-32b | paid + eleutherai/llemma_7b | paid + essentialai/rnj-1-instruct | paid alpindale/goliath-120b | paid - google/gemini-2.0-flash-thinking-exp:free | free - google/gemini-exp-1114:free | free - google/gemini-exp-1121:free | free - google/gemini-exp-1206:free | free - google/gemini-flash-1.5 | paid - google/gemini-flash-1.5-8b | paid - google/gemini-flash-1.5-8b-exp | paid - google/gemini-flash-1.5-exp | paid + google/gemini-2.0-flash-001 | paid google/gemini-2.0-flash-exp:free | free - google/gemini-pro | paid - google/gemini-pro-1.5 | paid - google/gemini-pro-1.5-exp | paid - google/gemini-pro-vision | paid + google/gemini-2.0-flash-lite-001 | paid + google/gemini-2.5-flash | paid + google/gemini-2.5-flash-image | paid + google/gemini-2.5-flash-image-preview | paid + google/gemini-2.5-flash-lite | paid + google/gemini-2.5-flash-lite-preview-09-2025 | paid + 
google/gemini-2.5-flash-preview-09-2025 | paid + google/gemini-2.5-pro | paid + google/gemini-2.5-pro-preview-05-06 | paid + google/gemini-2.5-pro-preview | paid + google/gemini-3-flash-preview | paid + google/gemini-3-pro-preview | paid google/gemma-2-27b-it | paid google/gemma-2-9b-it | paid - google/gemma-2-9b-it:free | free - google/learnlm-1.5-pro-experimental:free | free - google/palm-2-chat-bison | paid - google/palm-2-chat-bison-32k | paid - google/palm-2-codechat-bison | paid - google/palm-2-codechat-bison-32k | paid - huggingfaceh4/zephyr-7b-beta:free | free - infermatic/mn-inferor-12b | paid - inflatebot/mn-mag-mell-r1 | paid + google/gemma-3-12b-it | paid + google/gemma-3-12b-it:free | free + google/gemma-3-27b-it | paid + google/gemma-3-27b-it:free | free + google/gemma-3-4b-it | paid + google/gemma-3-4b-it:free | free + google/gemma-3n-e2b-it:free | free + google/gemma-3n-e4b-it | paid + google/gemma-3n-e4b-it:free | free + google/gemini-3-pro-image-preview | paid + ibm-granite/granite-4.0-h-micro | paid + inception/mercury | paid + inception/mercury-coder | paid inflection/inflection-3-pi | paid inflection/inflection-3-productivity | paid - liquid/lfm-40b | paid - lizpreciatior/lzlv-70b-fp16-hf | paid - alpindale/magnum-72b | paid - anthracite-org/magnum-v2-72b | paid + kwaipilot/kat-coder-pro:free | free + liquid/lfm-2.2-6b | paid + liquid/lfm2-8b-a1b | paid + meta-llama/llama-guard-3-8b | paid anthracite-org/magnum-v4-72b | paid mancer/weaver | paid - meta-llama/llama-2-13b-chat | paid + meituan/longcat-flash-chat | paid meta-llama/llama-3-70b-instruct | paid - meta-llama/llama-3-70b-instruct:nitro | paid meta-llama/llama-3-8b-instruct | paid - meta-llama/llama-3-8b-instruct:extended | paid - meta-llama/llama-3-8b-instruct:free | free - meta-llama/llama-3-8b-instruct:nitro | paid meta-llama/llama-3.1-405b | paid meta-llama/llama-3.1-405b-instruct | paid meta-llama/llama-3.1-405b-instruct:free | free - meta-llama/llama-3.1-405b-instruct:nitro | paid meta-llama/llama-3.1-70b-instruct | paid - meta-llama/llama-3.1-70b-instruct:free | free - meta-llama/llama-3.1-70b-instruct:nitro | paid meta-llama/llama-3.1-8b-instruct | paid - meta-llama/llama-3.1-8b-instruct:free | free meta-llama/llama-3.2-11b-vision-instruct | paid - meta-llama/llama-3.2-11b-vision-instruct:free | free meta-llama/llama-3.2-1b-instruct | paid - meta-llama/llama-3.2-1b-instruct:free | free meta-llama/llama-3.2-3b-instruct | paid meta-llama/llama-3.2-3b-instruct:free | free meta-llama/llama-3.2-90b-vision-instruct | paid - meta-llama/llama-3.2-90b-vision-instruct:free | free meta-llama/llama-3.3-70b-instruct | paid + meta-llama/llama-3.3-70b-instruct:free | free + meta-llama/llama-4-maverick | paid + meta-llama/llama-4-scout | paid + meta-llama/llama-guard-4-12b | paid meta-llama/llama-guard-2-8b | paid + microsoft/phi-4 | paid + microsoft/phi-4-multimodal-instruct | paid + microsoft/phi-4-reasoning-plus | paid microsoft/phi-3-medium-128k-instruct | paid - microsoft/phi-3-medium-128k-instruct:free | free microsoft/phi-3-mini-128k-instruct | paid - microsoft/phi-3-mini-128k-instruct:free | free microsoft/phi-3.5-mini-128k-instruct | paid - sophosympatheia/midnight-rose-70b | paid + minimax/minimax-m1 | paid + minimax/minimax-m2 | paid + minimax/minimax-m2.1 | paid + minimax/minimax-01 | paid mistralai/mistral-large | paid mistralai/mistral-large-2407 | paid mistralai/mistral-large-2411 | paid - mistralai/mistral-medium | paid - nothingiisreal/mn-celeste-12b | paid - mistralai/mistral-small | paid 
mistralai/mistral-tiny | paid - mistralai/codestral-mamba | paid + mistralai/codestral-2508 | paid + mistralai/devstral-2512 | paid + mistralai/devstral-2512:free | free + mistralai/devstral-medium | paid + mistralai/devstral-small | paid + mistralai/devstral-small-2505 | paid + mistralai/ministral-14b-2512 | paid + mistralai/ministral-3b-2512 | paid + mistralai/ministral-8b-2512 | paid mistralai/ministral-3b | paid mistralai/ministral-8b | paid mistralai/mistral-7b-instruct | paid mistralai/mistral-7b-instruct:free | free - mistralai/mistral-7b-instruct:nitro | paid mistralai/mistral-7b-instruct-v0.1 | paid mistralai/mistral-7b-instruct-v0.2 | paid mistralai/mistral-7b-instruct-v0.3 | paid + mistralai/mistral-large-2512 | paid + mistralai/mistral-medium-3 | paid + mistralai/mistral-medium-3.1 | paid mistralai/mistral-nemo | paid + mistralai/mistral-small-24b-instruct-2501 | paid + mistralai/mistral-small-3.1-24b-instruct | paid + mistralai/mistral-small-3.1-24b-instruct:free | free + mistralai/mistral-small-3.2-24b-instruct | paid + mistralai/mistral-small-creative | paid mistralai/mixtral-8x22b-instruct | paid - mistralai/mixtral-8x7b | paid mistralai/mixtral-8x7b-instruct | paid - mistralai/mixtral-8x7b-instruct:nitro | paid mistralai/pixtral-12b | paid mistralai/pixtral-large-2411 | paid + mistralai/mistral-saba | paid + mistralai/voxtral-small-24b-2507 | paid + moonshotai/kimi-dev-72b | paid + moonshotai/kimi-k2 | paid + moonshotai/kimi-k2:free | free + moonshotai/kimi-k2-0905 | paid + moonshotai/kimi-k2-0905:exacto | paid + moonshotai/kimi-k2-thinking | paid + morph/morph-v3-fast | paid + morph/morph-v3-large | paid gryphe/mythomax-l2-13b | paid - gryphe/mythomax-l2-13b:extended | paid - gryphe/mythomax-l2-13b:free | free - gryphe/mythomax-l2-13b:nitro | paid - neversleep/llama-3-lumimaid-70b | paid - neversleep/llama-3-lumimaid-8b | paid - neversleep/llama-3-lumimaid-8b:extended | paid - neversleep/llama-3.1-lumimaid-70b | paid neversleep/llama-3.1-lumimaid-8b | paid + nex-agi/deepseek-v3.1-nex-n1:free | free neversleep/noromaid-20b | paid - nousresearch/nous-hermes-llama2-13b | paid - nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid + nousresearch/deephermes-3-mistral-24b-preview | paid nousresearch/hermes-3-llama-3.1-405b | paid + nousresearch/hermes-3-llama-3.1-405b:free | free nousresearch/hermes-3-llama-3.1-70b | paid + nousresearch/hermes-4-405b | paid + nousresearch/hermes-4-70b | paid nousresearch/hermes-2-pro-llama-3-8b | paid nvidia/llama-3.1-nemotron-70b-instruct | paid + nvidia/llama-3.1-nemotron-ultra-253b-v1 | paid + nvidia/llama-3.3-nemotron-super-49b-v1.5 | paid + nvidia/nemotron-3-nano-30b-a3b | paid + nvidia/nemotron-3-nano-30b-a3b:free | free + nvidia/nemotron-nano-12b-v2-vl | paid + nvidia/nemotron-nano-12b-v2-vl:free | free + nvidia/nemotron-nano-9b-v2 | paid + nvidia/nemotron-nano-9b-v2:free | free openai/chatgpt-4o-latest | paid + openai/codex-mini | paid openai/gpt-3.5-turbo | paid openai/gpt-3.5-turbo-0613 | paid openai/gpt-3.5-turbo-16k | paid - openai/gpt-3.5-turbo-0125 | paid - openai/gpt-3.5-turbo-1106 | paid openai/gpt-3.5-turbo-instruct | paid openai/gpt-4 | paid openai/gpt-4-0314 | paid - openai/gpt-4-32k | paid - openai/gpt-4-32k-0314 | paid openai/gpt-4-turbo | paid openai/gpt-4-1106-preview | paid openai/gpt-4-turbo-preview | paid + openai/gpt-4.1 | paid + openai/gpt-4.1-mini | paid + openai/gpt-4.1-nano | paid openai/gpt-4o | paid openai/gpt-4o-2024-05-13 | paid openai/gpt-4o-2024-08-06 | paid openai/gpt-4o-2024-11-20 | paid 
openai/gpt-4o:extended | paid + openai/gpt-4o-audio-preview | paid + openai/gpt-4o-search-preview | paid openai/gpt-4o-mini | paid openai/gpt-4o-mini-2024-07-18 | paid + openai/gpt-4o-mini-search-preview | paid + openai/gpt-5 | paid + openai/gpt-5-chat | paid + openai/gpt-5-codex | paid + openai/gpt-5-image | paid + openai/gpt-5-image-mini | paid + openai/gpt-5-mini | paid + openai/gpt-5-nano | paid + openai/gpt-5-pro | paid + openai/gpt-5.1 | paid + openai/gpt-5.1-chat | paid + openai/gpt-5.1-codex | paid + openai/gpt-5.1-codex-max | paid + openai/gpt-5.1-codex-mini | paid + openai/gpt-5.2 | paid + openai/gpt-5.2-chat | paid + openai/gpt-5.2-pro | paid + openai/gpt-oss-120b | paid + openai/gpt-oss-120b:exacto | paid + openai/gpt-oss-120b:free | free + openai/gpt-oss-20b | paid + openai/gpt-oss-20b:free | free + openai/gpt-oss-safeguard-20b | paid openai/o1 | paid - openai/o1-mini | paid - openai/o1-mini-2024-09-12 | paid - openai/o1-preview | paid - openai/o1-preview-2024-09-12 | paid - openchat/openchat-7b | paid - openchat/openchat-7b:free | free - teknium/openhermes-2.5-mistral-7b | paid - perplexity/llama-3.1-sonar-huge-128k-online | paid - perplexity/llama-3.1-sonar-large-128k-chat | paid - perplexity/llama-3.1-sonar-large-128k-online | paid - perplexity/llama-3.1-sonar-small-128k-chat | paid - perplexity/llama-3.1-sonar-small-128k-online | paid - perplexity/llama-3-sonar-large-32k-chat | paid - perplexity/llama-3-sonar-large-32k-online | paid - perplexity/llama-3-sonar-small-32k-chat | paid - pygmalionai/mythalion-13b | paid - qwen/qwen-2-72b-instruct | paid - qwen/qwen-2-7b-instruct | paid - qwen/qwen-2-7b-instruct:free | free - qwen/qvq-72b-preview | paid - qwen/qwq-32b-preview | paid - qwen/qwen-2-vl-72b-instruct | paid - qwen/qwen-2-vl-7b-instruct | paid - qwen/qwen-2.5-72b-instruct | paid + openai/o1-pro | paid + openai/o3 | paid + openai/o3-deep-research | paid + openai/o3-mini | paid + openai/o3-mini-high | paid + openai/o3-pro | paid + openai/o4-mini | paid + openai/o4-mini-deep-research | paid + openai/o4-mini-high | paid + opengvlab/internvl3-78b | paid + perplexity/sonar | paid + perplexity/sonar-deep-research | paid + perplexity/sonar-pro | paid + perplexity/sonar-pro-search | paid + perplexity/sonar-reasoning | paid + perplexity/sonar-reasoning-pro | paid + prime-intellect/intellect-3 | paid + qwen/qwen-plus-2025-07-28 | paid + qwen/qwen-plus-2025-07-28:thinking | paid + qwen/qwen-vl-max | paid + qwen/qwen-vl-plus | paid + qwen/qwen-max | paid + qwen/qwen-plus | paid + qwen/qwen-turbo | paid qwen/qwen-2.5-7b-instruct | paid + qwen/qwen2.5-coder-7b-instruct | paid + qwen/qwen2.5-vl-32b-instruct | paid + qwen/qwen2.5-vl-72b-instruct | paid + qwen/qwen-2.5-vl-7b-instruct | paid + qwen/qwen-2.5-vl-7b-instruct:free | free + qwen/qwen3-14b | paid + qwen/qwen3-235b-a22b | paid + qwen/qwen3-235b-a22b-2507 | paid + qwen/qwen3-235b-a22b-thinking-2507 | paid + qwen/qwen3-30b-a3b | paid + qwen/qwen3-30b-a3b-instruct-2507 | paid + qwen/qwen3-30b-a3b-thinking-2507 | paid + qwen/qwen3-32b | paid + qwen/qwen3-4b:free | free + qwen/qwen3-8b | paid + qwen/qwen3-coder-30b-a3b-instruct | paid + qwen/qwen3-coder | paid + qwen/qwen3-coder:exacto | paid + qwen/qwen3-coder:free | free + qwen/qwen3-coder-flash | paid + qwen/qwen3-coder-plus | paid + qwen/qwen3-max | paid + qwen/qwen3-next-80b-a3b-instruct | paid + qwen/qwen3-next-80b-a3b-thinking | paid + qwen/qwen3-vl-235b-a22b-instruct | paid + qwen/qwen3-vl-235b-a22b-thinking | paid + qwen/qwen3-vl-30b-a3b-instruct | paid + 
qwen/qwen3-vl-30b-a3b-thinking | paid + qwen/qwen3-vl-32b-instruct | paid + qwen/qwen3-vl-8b-instruct | paid + qwen/qwen3-vl-8b-thinking | paid + qwen/qwq-32b | paid + qwen/qwen-2.5-72b-instruct | paid qwen/qwen-2.5-coder-32b-instruct | paid + relace/relace-apply-3 | paid + relace/relace-search | paid undi95/remm-slerp-l2-13b | paid - undi95/remm-slerp-l2-13b:extended | paid - thedrummer/rocinante-12b | paid sao10k/l3-lunaris-8b | paid sao10k/l3-euryale-70b | paid + sao10k/l3.1-70b-hanami-x1 | paid sao10k/l3.1-euryale-70b | paid sao10k/l3.3-euryale-70b | paid raifle/sorcererlm-8x22b | paid - undi95/toppy-m-7b | paid - undi95/toppy-m-7b:free | free - undi95/toppy-m-7b:nitro | paid + stepfun-ai/step3 | paid + switchpoint/router | paid + tencent/hunyuan-a13b-instruct | paid + thedrummer/cydonia-24b-v4.1 | paid + thedrummer/rocinante-12b | paid + thedrummer/skyfall-36b-v2 | paid thedrummer/unslopnemo-12b | paid - microsoft/wizardlm-2-7b | paid + thudm/glm-4.1v-9b-thinking | paid + tngtech/deepseek-r1t-chimera | paid + tngtech/deepseek-r1t-chimera:free | free + tngtech/deepseek-r1t2-chimera | paid + tngtech/deepseek-r1t2-chimera:free | free + tngtech/tng-r1t-chimera | paid + tngtech/tng-r1t-chimera:free | free + alibaba/tongyi-deepresearch-30b-a3b | paid + alibaba/tongyi-deepresearch-30b-a3b:free | free + cognitivecomputations/dolphin-mistral-24b-venice-edition:free | free microsoft/wizardlm-2-8x22b | paid - x-ai/grok-2-1212 | paid - x-ai/grok-2-vision-1212 | paid - x-ai/grok-beta | paid - x-ai/grok-vision-beta | paid - xwin-lm/xwin-lm-70b | paid + x-ai/grok-3 | paid + x-ai/grok-3-beta | paid + x-ai/grok-3-mini | paid + x-ai/grok-3-mini-beta | paid + x-ai/grok-4 | paid + x-ai/grok-4-fast | paid + x-ai/grok-4.1-fast | paid + x-ai/grok-code-fast-1 | paid + xiaomi/mimo-v2-flash:free | free + z-ai/glm-4-32b | paid + z-ai/glm-4.5 | paid + z-ai/glm-4.5-air | paid + z-ai/glm-4.5-air:free | free + z-ai/glm-4.5v | paid + z-ai/glm-4.6 | paid + z-ai/glm-4.6:exacto | paid + z-ai/glm-4.6v | paid + z-ai/glm-4.7 | paid   OpenAI models:  babbage-002 chatgpt-4o-latest + chatgpt-image-latest + codex-mini-latest dall-e-2 dall-e-3 davinci-002 @@ -266,33 +404,95 @@ export interface IKBotOptions { gpt-4-0125-preview gpt-4-0613 gpt-4-1106-preview - gpt-4-1106-vision-preview gpt-4-turbo gpt-4-turbo-2024-04-09 gpt-4-turbo-preview - gpt-4-vision-preview + gpt-4.1 + gpt-4.1-2025-04-14 + gpt-4.1-mini + gpt-4.1-mini-2025-04-14 + gpt-4.1-nano + gpt-4.1-nano-2025-04-14 gpt-4o gpt-4o-2024-05-13 gpt-4o-2024-08-06 gpt-4o-2024-11-20 gpt-4o-audio-preview - gpt-4o-audio-preview-2024-10-01 gpt-4o-audio-preview-2024-12-17 + gpt-4o-audio-preview-2025-06-03 gpt-4o-mini gpt-4o-mini-2024-07-18 gpt-4o-mini-audio-preview gpt-4o-mini-audio-preview-2024-12-17 gpt-4o-mini-realtime-preview gpt-4o-mini-realtime-preview-2024-12-17 + gpt-4o-mini-search-preview + gpt-4o-mini-search-preview-2025-03-11 + gpt-4o-mini-transcribe + gpt-4o-mini-transcribe-2025-03-20 + gpt-4o-mini-transcribe-2025-12-15 + gpt-4o-mini-tts + gpt-4o-mini-tts-2025-03-20 + gpt-4o-mini-tts-2025-12-15 gpt-4o-realtime-preview - gpt-4o-realtime-preview-2024-10-01 gpt-4o-realtime-preview-2024-12-17 - o1-mini - o1-mini-2024-09-12 - o1-preview - o1-preview-2024-09-12 + gpt-4o-realtime-preview-2025-06-03 + gpt-4o-search-preview + gpt-4o-search-preview-2025-03-11 + gpt-4o-transcribe + gpt-4o-transcribe-diarize + gpt-5 + gpt-5-2025-08-07 + gpt-5-chat-latest + gpt-5-codex + gpt-5-mini + gpt-5-mini-2025-08-07 + gpt-5-nano + gpt-5-nano-2025-08-07 + gpt-5-pro + gpt-5-pro-2025-10-06 + 
gpt-5-search-api + gpt-5-search-api-2025-10-14 + gpt-5.1 + gpt-5.1-2025-11-13 + gpt-5.1-chat-latest + gpt-5.1-codex + gpt-5.1-codex-max + gpt-5.1-codex-mini + gpt-5.2 + gpt-5.2-2025-12-11 + gpt-5.2-chat-latest + gpt-5.2-pro + gpt-5.2-pro-2025-12-11 + gpt-audio + gpt-audio-2025-08-28 + gpt-audio-mini + gpt-audio-mini-2025-10-06 + gpt-audio-mini-2025-12-15 + gpt-image-1 + gpt-image-1-mini + gpt-image-1.5 + gpt-realtime + gpt-realtime-2025-08-28 + gpt-realtime-mini + gpt-realtime-mini-2025-10-06 + gpt-realtime-mini-2025-12-15 + o1 + o1-2024-12-17 + o1-pro + o1-pro-2025-03-19 + o3 + o3-2025-04-16 + o3-mini + o3-mini-2025-01-31 + o4-mini + o4-mini-2025-04-16 + o4-mini-deep-research + o4-mini-deep-research-2025-06-26 omni-moderation-2024-09-26 omni-moderation-latest + sora-2 + sora-2-pro text-embedding-3-large text-embedding-3-small text-embedding-ada-002