diff --git a/packages/kbot/dist-in/data/openai_models.json b/packages/kbot/dist-in/data/openai_models.json index 278dcede..8518de32 100644 --- a/packages/kbot/dist-in/data/openai_models.json +++ b/packages/kbot/dist-in/data/openai_models.json @@ -1,5 +1,5 @@ { - "timestamp": 1757014666958, + "timestamp": 1757758318142, "models": [ { "id": "gpt-4-0613", diff --git a/packages/kbot/dist-in/data/openrouter_models.json b/packages/kbot/dist-in/data/openrouter_models.json index 1c83b721..5e5410fe 100644 --- a/packages/kbot/dist-in/data/openrouter_models.json +++ b/packages/kbot/dist-in/data/openrouter_models.json @@ -1,6 +1,583 @@ { - "timestamp": 1757014667576, + "timestamp": 1757758318388, "models": [ + { + "id": "qwen/qwen3-next-80b-a3b-thinking", + "canonical_slug": "qwen/qwen3-next-80b-a3b-thinking-2509", + "hugging_face_id": "Qwen/Qwen3-Next-80B-A3B-Thinking", + "name": "Qwen: Qwen3 Next 80B A3B Thinking", + "created": 1757612284, + "description": "Qwen3-Next-80B-A3B-Thinking is a reasoning-first chat model in the Qwen3-Next line that outputs structured “thinking” traces by default. It’s designed for hard multi-step problems; math proofs, code synthesis/debugging, logic, and agentic planning, and reports strong results across knowledge, reasoning, coding, alignment, and multilingual evaluations. Compared with prior Qwen3 variants, it emphasizes stability under long chains of thought and efficient scaling during inference, and it is tuned to follow complex instructions while reducing repetitive or off-task behavior.\n\nThe model is suitable for agent frameworks and tool use (function calling), retrieval-heavy workflows, and standardized benchmarking where step-by-step solutions are required. It supports long, detailed completions and leverages throughput-oriented techniques (e.g., multi-token prediction) for faster generation. Note that it operates in thinking-only mode.", + "context_length": 262144, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen3", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000009782604", + "completion": "0.000000391304304", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "repetition_penalty", + "response_format", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ] + }, + { + "id": "qwen/qwen3-next-80b-a3b-instruct", + "canonical_slug": "qwen/qwen3-next-80b-a3b-instruct-2509", + "hugging_face_id": "Qwen/Qwen3-Next-80B-A3B-Instruct", + "name": "Qwen: Qwen3 Next 80B A3B Instruct", + "created": 1757612213, + "description": "Qwen3-Next-80B-A3B-Instruct is an instruction-tuned chat model in the Qwen3-Next series optimized for fast, stable responses without “thinking” traces. It targets complex tasks across reasoning, code generation, knowledge QA, and multilingual use, while remaining robust on alignment and formatting. 
Compared with prior Qwen3 instruct variants, it focuses on higher throughput and stability on ultra-long inputs and multi-turn dialogues, making it well-suited for RAG, tool use, and agentic workflows that require consistent final answers rather than visible chain-of-thought.\n\nThe model employs scaling-efficient training and decoding to improve parameter efficiency and inference speed, and has been validated on a broad set of public benchmarks where it reaches or approaches larger Qwen3 systems in several categories while outperforming earlier mid-sized baselines. It is best used as a general assistant, code helper, and long-context task solver in production settings where deterministic, instruction-following outputs are preferred.", + "context_length": 262144, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen3", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000009782604", + "completion": "0.000000391304304", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ] + }, + { + "id": "meituan/longcat-flash-chat", + "canonical_slug": "meituan/longcat-flash-chat", + "hugging_face_id": "meituan-longcat/LongCat-Flash-Chat", + "name": "Meituan: LongCat Flash Chat", + "created": 1757427658, + "description": "LongCat-Flash-Chat is a large-scale Mixture-of-Experts (MoE) model with 560B total parameters, of which 18.6B–31.3B (≈27B on average) are dynamically activated per input. It introduces a shortcut-connected MoE design to reduce communication overhead and achieve high throughput while maintaining training stability through advanced scaling strategies such as hyperparameter transfer, deterministic computation, and multi-stage optimization.\n\nThis release, LongCat-Flash-Chat, is a non-thinking foundation model optimized for conversational and agentic tasks. 
It supports long context windows up to 128K tokens and shows competitive performance across reasoning, coding, instruction following, and domain benchmarks, with particular strengths in tool use and complex multi-step interactions.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000015", + "completion": "0.00000075", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 131072, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ] + }, + { + "id": "qwen/qwen-plus-2025-07-28", + "canonical_slug": "qwen/qwen-plus-2025-07-28", + "hugging_face_id": "", + "name": "Qwen: Qwen Plus 0728", + "created": 1757347599, + "description": "Qwen Plus 0728, based on the Qwen3 foundation model, is a 1 million context hybrid reasoning model with a balanced performance, speed, and cost combination.", + "context_length": 1000000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen3", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000004", + "completion": "0.0000012", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 1000000, + "max_completion_tokens": 32768, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_p" + ] + }, + { + "id": "qwen/qwen-plus-2025-07-28:thinking", + "canonical_slug": "qwen/qwen-plus-2025-07-28", + "hugging_face_id": "", + "name": "Qwen: Qwen Plus 0728 (thinking)", + "created": 1757347599, + "description": "Qwen Plus 0728, based on the Qwen3 foundation model, is a 1 million context hybrid reasoning model with a balanced performance, speed, and cost combination.", + "context_length": 1000000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen3", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000004", + "completion": "0.000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 1000000, + "max_completion_tokens": 32768, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "include_reasoning", + "max_tokens", + "presence_penalty", + "reasoning", + "response_format", + "seed", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_p" + ] + }, + { + "id": "nvidia/nemotron-nano-9b-v2:free", + "canonical_slug": "nvidia/nemotron-nano-9b-v2", + "hugging_face_id": "nvidia/NVIDIA-Nemotron-Nano-9B-v2", + "name": "NVIDIA: Nemotron Nano 9B V2 (free)", + "created": 1757106807, + "description": "NVIDIA-Nemotron-Nano-9B-v2 is a large language model (LLM) trained from scratch by NVIDIA, and designed as a unified model for both reasoning and non-reasoning 
tasks. It responds to user queries and tasks by first generating a reasoning trace and then concluding with a final response. \n\nThe model's reasoning capabilities can be controlled via a system prompt. If the user prefers the model to provide its final answer without intermediate reasoning traces, it can be configured to do so.", + "context_length": 128000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "include_reasoning", + "reasoning", + "response_format", + "structured_outputs", + "tool_choice", + "tools" + ] + }, + { + "id": "nvidia/nemotron-nano-9b-v2", + "canonical_slug": "nvidia/nemotron-nano-9b-v2", + "hugging_face_id": "nvidia/NVIDIA-Nemotron-Nano-9B-v2", + "name": "NVIDIA: Nemotron Nano 9B V2", + "created": 1757106807, + "description": "NVIDIA-Nemotron-Nano-9B-v2 is a large language model (LLM) trained from scratch by NVIDIA, and designed as a unified model for both reasoning and non-reasoning tasks. It responds to user queries and tasks by first generating a reasoning trace and then concluding with a final response. \n\nThe model's reasoning capabilities can be controlled via a system prompt. If the user prefers the model to provide its final answer without intermediate reasoning traces, it can be configured to do so.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000004", + "completion": "0.00000016", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "repetition_penalty", + "response_format", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ] + }, + { + "id": "openrouter/sonoma-dusk-alpha", + "canonical_slug": "openrouter/sonoma-dusk-alpha", + "hugging_face_id": "", + "name": "Sonoma Dusk Alpha", + "created": 1757093247, + "description": "This is a cloaked model provided to the community to gather feedback. A fast and intelligent general-purpose frontier model with a 2 million token context window. 
Supports image inputs and parallel tool calling.\n\nNote: It’s free to use during this testing period, and prompts and completions are logged by the model creator for feedback and training.", + "context_length": 2000000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 2000000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "response_format", + "structured_outputs", + "tool_choice", + "tools" + ] + }, + { + "id": "openrouter/sonoma-sky-alpha", + "canonical_slug": "openrouter/sonoma-sky-alpha", + "hugging_face_id": "", + "name": "Sonoma Sky Alpha", + "created": 1757093001, + "description": "This is a cloaked model provided to the community to gather feedback. A maximally intelligent general-purpose frontier model with a 2 million token context window. Supports image inputs and parallel tool calling.\n\nNote: It’s free to use during this testing period, and prompts and completions are logged by the model creator for feedback and training.", + "context_length": 2000000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0", + "completion": "0", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 2000000, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "include_reasoning", + "max_tokens", + "reasoning", + "response_format", + "structured_outputs", + "tool_choice", + "tools" + ] + }, + { + "id": "qwen/qwen3-max", + "canonical_slug": "qwen/qwen3-max", + "hugging_face_id": "", + "name": "Qwen: Qwen3 Max", + "created": 1757076567, + "description": "Qwen3-Max is an updated release built on the Qwen3 series, offering major improvements in reasoning, instruction following, multilingual support, and long-tail knowledge coverage compared to the January 2025 version. It delivers higher accuracy in math, coding, logic, and science tasks, follows complex instructions in Chinese and English more reliably, reduces hallucinations, and produces higher-quality responses for open-ended Q&A, writing, and conversation. 
The model supports over 100 languages with stronger translation and commonsense reasoning, and is optimized for retrieval-augmented generation (RAG) and tool calling, though it does not include a dedicated “thinking” mode.", + "context_length": 256000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen3", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000012", + "completion": "0.000006", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.00000024" + }, + "top_provider": { + "context_length": 256000, + "max_completion_tokens": 32768, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "temperature", + "tool_choice", + "tools", + "top_p" + ] + }, + { + "id": "moonshotai/kimi-k2-0905", + "canonical_slug": "moonshotai/kimi-k2-0905", + "hugging_face_id": "moonshotai/Kimi-K2-Instruct-0905", + "name": "MoonshotAI: Kimi K2 0905", + "created": 1757021147, + "description": "Kimi K2 0905 is the September update of [Kimi K2 0711](moonshotai/kimi-k2). It is a large-scale Mixture-of-Experts (MoE) language model developed by Moonshot AI, featuring 1 trillion total parameters with 32 billion active per forward pass. It supports long-context inference up to 256k tokens, extended from the previous 128k.\n\nThis update improves agentic coding with higher accuracy and better generalization across scaffolds, and enhances frontend coding with more aesthetic and functional outputs for web, 3D, and related tasks. Kimi K2 is optimized for agentic capabilities, including advanced tool use, reasoning, and code synthesis. It excels across coding (LiveCodeBench, SWE-bench), reasoning (ZebraLogic, GPQA), and tool-use (Tau2, AceBench) benchmarks. The model is trained with a novel stack incorporating the MuonClip optimizer for stable large-scale MoE training.", + "context_length": 262144, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000003804346", + "completion": "0.00000152173896", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 262144, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ] + }, + { + "id": "bytedance/seed-oss-36b-instruct", + "canonical_slug": "bytedance/seed-oss-36b-instruct", + "hugging_face_id": "ByteDance-Seed/Seed-OSS-36B-Instruct", + "name": "ByteDance: Seed OSS 36B Instruct", + "created": 1756834704, + "description": "Seed-OSS-36B-Instruct is a 36B-parameter instruction-tuned reasoning language model from ByteDance’s Seed team, released under Apache-2.0. The model is optimized for general instruction following with strong performance in reasoning, mathematics, coding, tool use/agentic workflows, and multilingual tasks, and is intended for international (i18n) use cases. 
It is not currently possible to control the reasoning effort.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000002006688", + "completion": "0.00000080267549538462", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "reasoning", + "repetition_penalty", + "seed", + "stop", + "temperature", + "top_k", + "top_logprobs", + "top_p" + ] + }, { "id": "deepcogito/cogito-v2-preview-llama-109b-moe", "canonical_slug": "deepcogito/cogito-v2-preview-llama-109b-moe", @@ -100,6 +677,52 @@ "top_p" ] }, + { + "id": "stepfun-ai/step3", + "canonical_slug": "stepfun-ai/step3", + "hugging_face_id": "stepfun-ai/step3", + "name": "StepFun: Step3", + "created": 1756415375, + "description": "Step3 is a cutting-edge multimodal reasoning model—built on a Mixture-of-Experts architecture with 321B total parameters and 38B active. It is designed end-to-end to minimize decoding costs while delivering top-tier performance in vision–language reasoning. Through the co-design of Multi-Matrix Factorization Attention (MFA) and Attention-FFN Disaggregation (AFD), Step3 maintains exceptional efficiency across both flagship and low-end accelerators.", + "context_length": 65536, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "image", + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000057", + "completion": "0.00000142", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 65536, + "max_completion_tokens": 65536, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "include_reasoning", + "reasoning", + "response_format", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ] + }, { "id": "qwen/qwen3-30b-a3b-thinking-2507", "canonical_slug": "qwen/qwen3-30b-a3b-thinking-2507", @@ -120,8 +743,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000000713", - "completion": "0.0000002852", + "prompt": "0.00000008967387", + "completion": "0.000000358695612", "request": "0", "image": "0", "web_search": "0", @@ -224,8 +847,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000009329544", - "completion": "0.0000003733632", + "prompt": "0.000000127173852", + "completion": "0.0000005086955952", "request": "0", "image": "0", "web_search": "0", @@ -277,8 +900,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000001999188", - "completion": "0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -316,7 +939,7 @@ "hugging_face_id": "", "name": "Google: Gemini 2.5 Flash Image Preview", "created": 1756218977, - "description": "Gemini 2.5 Flash Image Preview is a state of the art image generation model with contextual understanding. 
It is capable of image generation, edits, and multi-turn conversations.", + "description": "Gemini 2.5 Flash Image Preview, AKA Nano Banana is a state of the art image generation model with contextual understanding. It is capable of image generation, edits, and multi-turn conversations.", "context_length": 32768, "architecture": { "modality": "text+image->text+image", @@ -425,8 +1048,8 @@ "instruct_type": "deepseek-v3.1" }, "pricing": { - "prompt": "0.0000002", - "completion": "0.0000008", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -480,8 +1103,8 @@ "instruct_type": "none" }, "pricing": { - "prompt": "0.0000002", - "completion": "0.0000008", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -881,9 +1504,7 @@ }, "per_request_limits": null, "supported_parameters": [ - "include_reasoning", "max_tokens", - "reasoning", "response_format", "seed", "structured_outputs" @@ -1251,7 +1872,7 @@ "completion": "0.000075", "request": "0", "image": "0.024", - "web_search": "0", + "web_search": "0.01", "internal_reasoning": "0", "input_cache_read": "0.0000015", "input_cache_write": "0.00001875" @@ -1259,7 +1880,7 @@ "top_provider": { "context_length": 200000, "max_completion_tokens": 32000, - "is_moderated": false + "is_moderated": true }, "per_request_limits": null, "supported_parameters": [ @@ -1339,8 +1960,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000000518308", - "completion": "0.000000207424", + "prompt": "0.00000007065214", + "completion": "0.000000282608664", "request": "0", "image": "0", "web_search": "0", @@ -1391,8 +2012,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000000518308", - "completion": "0.000000207424", + "prompt": "0.00000007065214", + "completion": "0.000000282608664", "request": "0", "image": "0", "web_search": "0", @@ -1443,8 +2064,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000032986602", - "completion": "0.0000013201056", + "prompt": "0.000000412499802", + "completion": "0.0000016499998152", "request": "0", "image": "0", "web_search": "0", @@ -1597,8 +2218,8 @@ "instruct_type": "qwen3" }, "pricing": { - "prompt": "0.000000077968332", - "completion": "0.00000031202496", + "prompt": "0.0000000974999532", + "completion": "0.00000038999995632", "request": "0", "image": "0", "web_search": "0", @@ -1743,8 +2364,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000002", - "completion": "0.0000008", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -1896,8 +2517,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.000000077968332", - "completion": "0.00000031202496", + "prompt": "0.0000000974999532", + "completion": "0.00000038999995632", "request": "0", "image": "0", "web_search": "0", @@ -1977,7 +2598,7 @@ "id": "moonshotai/kimi-k2:free", "canonical_slug": "moonshotai/kimi-k2", "hugging_face_id": "moonshotai/Kimi-K2-Instruct", - "name": "MoonshotAI: Kimi K2 (free)", + "name": "MoonshotAI: Kimi K2 0711 (free)", "created": 1752263252, "description": "Kimi K2 Instruct is a large-scale Mixture-of-Experts (MoE) language model developed by Moonshot AI, featuring 1 trillion total parameters with 32 billion active per forward pass. It is optimized for agentic capabilities, including advanced tool use, reasoning, and code synthesis. 
Kimi K2 excels across a broad range of benchmarks, particularly in coding (LiveCodeBench, SWE-bench), reasoning (ZebraLogic, GPQA), and tool-use (Tau2, AceBench) tasks. It supports long-context inference up to 128K tokens and is designed with a novel training stack that includes the MuonClip optimizer for stable large-scale MoE training.", "context_length": 32768, @@ -2028,7 +2649,7 @@ "id": "moonshotai/kimi-k2", "canonical_slug": "moonshotai/kimi-k2", "hugging_face_id": "moonshotai/Kimi-K2-Instruct", - "name": "MoonshotAI: Kimi K2", + "name": "MoonshotAI: Kimi K2 0711", "created": 1752263252, "description": "Kimi K2 Instruct is a large-scale Mixture-of-Experts (MoE) language model developed by Moonshot AI, featuring 1 trillion total parameters with 32 billion active per forward pass. It is optimized for agentic capabilities, including advanced tool use, reasoning, and code synthesis. Kimi K2 excels across a broad range of benchmarks, particularly in coding (LiveCodeBench, SWE-bench), reasoning (ZebraLogic, GPQA), and tool-use (Tau2, AceBench) tasks. It supports long-context inference up to 128K tokens and is designed with a novel training stack that includes the MuonClip optimizer for stable large-scale MoE training.", "context_length": 63000, @@ -3614,8 +4235,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.00000001703012", - "completion": "0.0000000681536", + "prompt": "0.000000013043472", + "completion": "0.0000000521739072", "request": "0", "image": "0", "web_search": "0", @@ -3716,8 +4337,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.0000001999188", - "completion": "0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -3777,7 +4398,7 @@ "completion": "0.000075", "request": "0", "image": "0.024", - "web_search": "0", + "web_search": "0.01", "internal_reasoning": "0", "input_cache_read": "0.0000015", "input_cache_write": "0.00001875" @@ -3785,7 +4406,7 @@ "top_provider": { "context_length": 200000, "max_completion_tokens": 32000, - "is_moderated": false + "is_moderated": true }, "per_request_limits": null, "supported_parameters": [ @@ -3920,8 +4541,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.000000035869548", + "completion": "0.0000001434782448", "request": "0", "image": "0", "web_search": "0", @@ -4154,8 +4775,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000009329544", - "completion": "0.0000003733632", + "prompt": "0.000000127173852", + "completion": "0.0000005086955952", "request": "0", "image": "0", "web_search": "0", @@ -4784,8 +5405,8 @@ "instruct_type": "qwen3" }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.000000035869548", + "completion": "0.0000001434782448", "request": "0", "image": "0", "web_search": "0", @@ -5045,8 +5666,8 @@ "instruct_type": "qwen3" }, "pricing": { - "prompt": "0.000000017992692", - "completion": "0.00000007200576", + "prompt": "0.0000000322825932", + "completion": "0.00000012913042032", "request": "0", "image": "0", "web_search": "0", @@ -5261,8 +5882,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000001999188", - "completion": "0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -5363,8 +5984,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.0000001999188", - "completion": 
"0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -5414,8 +6035,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.000000035869548", + "completion": "0.0000001434782448", "request": "0", "image": "0", "web_search": "0", @@ -5702,8 +6323,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.000000035869548", + "completion": "0.0000001434782448", "request": "0", "image": "0", "web_search": "0", @@ -5945,7 +6566,7 @@ "name": "AlfredPros: CodeLLaMa 7B Instruct Solidity", "created": 1744641874, "description": "A finetuned 7 billion parameters Code LLaMA - Instruct model to generate Solidity smart contract using 4-bit QLoRA finetuning provided by PEFT library.", - "context_length": 8192, + "context_length": 4096, "architecture": { "modality": "text->text", "input_modalities": [ @@ -5958,22 +6579,21 @@ "instruct_type": "alpaca" }, "pricing": { - "prompt": "0.0000007", - "completion": "0.0000011", + "prompt": "0.0000008", + "completion": "0.0000012", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 8192, - "max_completion_tokens": 8192, + "context_length": 4096, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ "frequency_penalty", - "logit_bias", "max_tokens", "min_p", "presence_penalty", @@ -6056,8 +6676,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.00000001", - "completion": "0.0000000400032", + "prompt": "0.000000017934774", + "completion": "0.0000000717391224", "request": "0", "image": "0", "web_search": "0", @@ -6262,8 +6882,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000002498985", - "completion": "0.000000100008", + "prompt": "0.00000006249997", + "completion": "0.000000249999972", "request": "0", "image": "0", "web_search": "0", @@ -6391,53 +7011,6 @@ "top_p" ] }, - { - "id": "nvidia/llama-3.3-nemotron-super-49b-v1", - "canonical_slug": "nvidia/llama-3.3-nemotron-super-49b-v1", - "hugging_face_id": "nvidia/Llama-3_3-Nemotron-Super-49B-v1", - "name": "NVIDIA: Llama 3.3 Nemotron Super 49B v1", - "created": 1744119494, - "description": "Llama-3.3-Nemotron-Super-49B-v1 is a large language model (LLM) optimized for advanced reasoning, conversational interactions, retrieval-augmented generation (RAG), and tool-calling tasks. Derived from Meta's Llama-3.3-70B-Instruct, it employs a Neural Architecture Search (NAS) approach, significantly enhancing efficiency and reducing memory requirements. This allows the model to support a context length of up to 128K tokens and fit efficiently on single high-performance GPUs, such as NVIDIA H200.\n\nNote: you must include `detailed thinking on` in the system prompt to enable reasoning. 
Please see [Usage Recommendations](https://huggingface.co/nvidia/Llama-3_1-Nemotron-Ultra-253B-v1#quick-start-and-usage-recommendations) for more.", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000013", - "completion": "0.0000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "logprobs", - "max_tokens", - "presence_penalty", - "seed", - "stop", - "temperature", - "top_k", - "top_logprobs", - "top_p" - ] - }, { "id": "nvidia/llama-3.1-nemotron-ultra-253b-v1:free", "canonical_slug": "nvidia/llama-3.1-nemotron-ultra-253b-v1", @@ -6737,49 +7310,50 @@ ] }, { - "id": "google/gemini-2.5-pro-exp-03-25", - "canonical_slug": "google/gemini-2.5-pro-exp-03-25", - "hugging_face_id": "", - "name": "Google: Gemini 2.5 Pro Experimental", - "created": 1742922099, - "description": "This model has been deprecated by Google in favor of the (paid Preview model)[google/gemini-2.5-pro-preview]\n \nGemini 2.5 Pro is Google’s state-of-the-art AI model designed for advanced reasoning, coding, mathematics, and scientific tasks. It employs “thinking” capabilities, enabling it to reason through responses with enhanced accuracy and nuanced context handling. Gemini 2.5 Pro achieves top-tier performance on multiple benchmarks, including first-place positioning on the LMArena leaderboard, reflecting superior human-preference alignment and complex problem-solving abilities.", - "context_length": 1048576, + "id": "allenai/molmo-7b-d", + "canonical_slug": "allenai/molmo-7b-d-0924", + "hugging_face_id": "allenai/Molmo-7B-D-0924", + "name": "AllenAI: Molmo 7B D", + "created": 1743023247, + "description": "Molmo is a family of open vision-language models developed by the Allen Institute for AI. Molmo models are trained on PixMo, a dataset of 1 million, highly-curated image-text pairs. It has state-of-the-art performance among multimodal models with a similar size while being fully open-source. You can find all models in the Molmo family [here](https://huggingface.co/collections/allenai/molmo-66f379e6fe3b8ef090a8ca19). Learn more about the Molmo family [in the announcement blog post](https://molmo.allenai.org/blog) or the [paper](https://huggingface.co/papers/2409.17146).\n\nMolmo 7B-D is based on [Qwen2-7B](https://huggingface.co/Qwen/Qwen2-7B) and uses [OpenAI CLIP](https://huggingface.co/openai/clip-vit-large-patch14-336) as vision backbone. It performs comfortably between GPT-4V and GPT-4o on both academic benchmarks and human evaluation.\n\nThis checkpoint is a preview of the Molmo release. 
All artifacts used in creating Molmo (PixMo dataset, training code, evaluations, intermediate checkpoints) will be made available at a later date, furthering our commitment to open-source AI development and reproducibility.", + "context_length": 4096, "architecture": { "modality": "text+image->text", "input_modalities": [ "text", - "image", - "file" + "image" ], "output_modalities": [ "text" ], - "tokenizer": "Gemini", + "tokenizer": "Other", "instruct_type": null }, "pricing": { - "prompt": "0", - "completion": "0", + "prompt": "0.0000001", + "completion": "0.0000002", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 1048576, - "max_completion_tokens": 65535, + "context_length": 4096, + "max_completion_tokens": 4096, "is_moderated": false }, "per_request_limits": null, "supported_parameters": [ + "frequency_penalty", + "logit_bias", "max_tokens", - "response_format", + "min_p", + "presence_penalty", + "repetition_penalty", "seed", "stop", - "structured_outputs", "temperature", - "tool_choice", - "tools", + "top_k", "top_p" ] }, @@ -6855,8 +7429,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.000000035869548", + "completion": "0.0000001434782448", "request": "0", "image": "0", "web_search": "0", @@ -6957,8 +7531,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000001999188", - "completion": "0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -7109,8 +7683,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.00000003804346", + "completion": "0.000000152173896", "request": "0", "image": "0", "web_search": "0", @@ -7142,6 +7716,53 @@ "top_p" ] }, + { + "id": "allenai/olmo-2-0325-32b-instruct", + "canonical_slug": "allenai/olmo-2-0325-32b-instruct", + "hugging_face_id": "allenai/OLMo-2-0325-32B-Instruct", + "name": "AllenAI: Olmo 2 32B Instruct", + "created": 1741988556, + "description": "OLMo-2 32B Instruct is a supervised instruction-finetuned variant of the OLMo-2 32B March 2025 base model. It excels in complex reasoning and instruction-following tasks across diverse benchmarks such as GSM8K, MATH, IFEval, and general NLP evaluation. 
Developed by AI2, OLMo-2 32B is part of an open, research-oriented initiative, trained primarily on English-language datasets to advance the understanding and development of open-source language models.", + "context_length": 4096, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000001", + "completion": "0.0000015", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 4096, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "seed", + "stop", + "temperature", + "top_k", + "top_p" + ] + }, { "id": "google/gemma-3-4b-it:free", "canonical_slug": "google/gemma-3-4b-it", @@ -7206,8 +7827,8 @@ "instruct_type": "gemma" }, "pricing": { - "prompt": "0.00000002", - "completion": "0.00000004", + "prompt": "0.00000004", + "completion": "0.00000008", "request": "0", "image": "0", "web_search": "0", @@ -7304,8 +7925,8 @@ "instruct_type": "gemma" }, "pricing": { - "prompt": "0.0000000481286", - "completion": "0.000000192608", + "prompt": "0.00000003532607", + "completion": "0.000000141304332", "request": "0", "image": "0", "web_search": "0", @@ -7341,7 +7962,7 @@ "name": "Cohere: Command A", "created": 1741894342, "description": "Command A is an open-weights 111B parameter model with a 256k context window focused on delivering great performance across agentic, multilingual, and coding use cases.\nCompared to other leading proprietary and open-weights models Command A delivers maximum performance with minimum hardware costs, excelling on business-critical agentic and multilingual tasks.", - "context_length": 32768, + "context_length": 256000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -7354,17 +7975,17 @@ "instruct_type": null }, "pricing": { - "prompt": "0.000002", - "completion": "0.000008", + "prompt": "0.0000025", + "completion": "0.00001", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false + "context_length": 256000, + "max_completion_tokens": 8192, + "is_moderated": true }, "per_request_limits": null, "supported_parameters": [ @@ -7584,8 +8205,8 @@ "instruct_type": "gemma" }, "pricing": { - "prompt": "0.0000000666396", - "completion": "0.000000266688", + "prompt": "0.00000006521736", + "completion": "0.000000260869536", "request": "0", "image": "0", "web_search": "0", @@ -7682,8 +8303,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000000481286", - "completion": "0.000000192608", + "prompt": "0.000000039130416", + "completion": "0.0000001565217216", "request": "0", "image": "0", "web_search": "0", @@ -7703,10 +8324,8 @@ "min_p", "presence_penalty", "repetition_penalty", - "response_format", "seed", "stop", - "structured_outputs", "temperature", "top_k", "top_logprobs", @@ -7948,7 +8567,7 @@ "name": "Qwen: QwQ 32B", "created": 1741208814, "description": "QwQ is the reasoning model of the Qwen series. Compared with conventional instruction-tuned models, QwQ, which is capable of thinking and reasoning, can achieve significantly enhanced performance in downstream tasks, especially hard problems. 
QwQ-32B is the medium-sized reasoning model, which is capable of achieving competitive performance against state-of-the-art reasoning models, e.g., DeepSeek-R1, o1-mini.", - "context_length": 131072, + "context_length": 32768, "architecture": { "modality": "text->text", "input_modalities": [ @@ -7961,15 +8580,15 @@ "instruct_type": "qwq" }, "pricing": { - "prompt": "0.000000075", - "completion": "0.00000015", + "prompt": "0.00000015", + "completion": "0.0000004", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 131072, + "context_length": 32768, "max_completion_tokens": null, "is_moderated": false }, @@ -8351,8 +8970,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.00000001", - "completion": "0.0000000340768", + "prompt": "0.00000001333333333333", + "completion": "0.0000000347826048", "request": "0", "image": "0", "web_search": "0", @@ -8451,8 +9070,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.000000037022", - "completion": "0.00000014816", + "prompt": "0.0000000271739", + "completion": "0.00000010869564", "request": "0", "image": "0", "web_search": "0", @@ -8541,12 +9160,13 @@ "architecture": { "modality": "text->text", "input_modalities": [ - "text" + "text", + "file" ], "output_modalities": [ "text" ], - "tokenizer": "Other", + "tokenizer": "GPT", "instruct_type": null }, "pricing": { @@ -8993,8 +9613,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000000999594", - "completion": "0.000000400032", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -9123,7 +9743,8 @@ "architecture": { "modality": "text->text", "input_modalities": [ - "text" + "text", + "file" ], "output_modalities": [ "text" @@ -9224,8 +9845,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000001999188", - "completion": "0.0000000800064", + "prompt": "0.00000003804346", + "completion": "0.000000152173896", "request": "0", "image": "0", "web_search": "0", @@ -9277,8 +9898,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.000000075", - "completion": "0.00000015", + "prompt": "0.00000027", + "completion": "0.00000027", "request": "0", "image": "0", "web_search": "0", @@ -9663,8 +10284,8 @@ "instruct_type": "deepseek-r1" }, "pricing": { - "prompt": "0.0000000259154", - "completion": "0.000000103712", + "prompt": "0.00000003260868", + "completion": "0.000000130434768", "request": "0", "image": "0", "web_search": "0", @@ -9914,8 +10535,6 @@ "per_request_limits": null, "supported_parameters": [ "frequency_penalty", - "logit_bias", - "logprobs", "max_tokens", "min_p", "presence_penalty", @@ -9926,7 +10545,6 @@ "structured_outputs", "temperature", "top_k", - "top_logprobs", "top_p" ] }, @@ -9950,8 +10568,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000001999188", - "completion": "0.000000800064", + "prompt": "0.00000024999988", + "completion": "0.000000999999888", "request": "0", "image": "0", "web_search": "0", @@ -10018,7 +10636,6 @@ "per_request_limits": null, "supported_parameters": [ "frequency_penalty", - "logit_bias", "max_tokens", "min_p", "presence_penalty", @@ -10737,100 +11354,6 @@ "top_p" ] }, - { - "id": "x-ai/grok-vision-beta", - "canonical_slug": "x-ai/grok-vision-beta", - "hugging_face_id": "", - "name": "xAI: Grok Vision Beta", - "created": 1731976624, - "description": "Grok Vision Beta is xAI's experimental language model with vision capability.\n\n", - "context_length": 8192, - "architecture": { 
- "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Grok", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000005", - "completion": "0.000015", - "request": "0", - "image": "0.009", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logprobs", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "temperature", - "top_logprobs", - "top_p" - ] - }, - { - "id": "infermatic/mn-inferor-12b", - "canonical_slug": "infermatic/mn-inferor-12b", - "hugging_face_id": "Infermatic/MN-12B-Inferor-v0.0", - "name": "Infermatic: Mistral Nemo Inferor 12B", - "created": 1731464428, - "description": "Inferor 12B is a merge of top roleplay models, expert on immersive narratives and storytelling.\n\nThis model was merged using the [Model Stock](https://arxiv.org/abs/2403.19522) merge method using [anthracite-org/magnum-v4-12b](https://openrouter.ai/anthracite-org/magnum-v4-72b) as a base.\n", - "context_length": 8192, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": "mistral" - }, - "pricing": { - "prompt": "0.0000006", - "completion": "0.000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "seed", - "stop", - "temperature", - "top_k", - "top_p" - ] - }, { "id": "qwen/qwen-2.5-coder-32b-instruct:free", "canonical_slug": "qwen/qwen-2.5-coder-32b-instruct", @@ -10900,8 +11423,8 @@ "instruct_type": "chatml" }, "pricing": { - "prompt": "0.0000000499797", - "completion": "0.000000200016", + "prompt": "0.00000006", + "completion": "0.00000015", "request": "0", "image": "0", "web_search": "0", @@ -10909,7 +11432,7 @@ }, "top_provider": { "context_length": 32768, - "max_completion_tokens": null, + "max_completion_tokens": 16384, "is_moderated": false }, "per_request_limits": null, @@ -11029,6 +11552,52 @@ "top_p" ] }, + { + "id": "anthropic/claude-3.5-haiku", + "canonical_slug": "anthropic/claude-3-5-haiku", + "hugging_face_id": null, + "name": "Anthropic: Claude 3.5 Haiku", + "created": 1730678400, + "description": "Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. 
Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.\n\nThis makes it highly suitable for environments that demand both speed and precision, such as software development, customer service bots, and data management systems.\n\nThis model is currently pointing to [Claude 3.5 Haiku (2024-10-22)](/anthropic/claude-3-5-haiku-20241022).", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.000004", + "request": "0", + "image": "0", + "web_search": "0.01", + "internal_reasoning": "0", + "input_cache_read": "0.00000008", + "input_cache_write": "0.000001" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 8192, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ] + }, { "id": "anthropic/claude-3.5-haiku-20241022", "canonical_slug": "anthropic/claude-3-5-haiku-20241022", @@ -11076,52 +11645,6 @@ "top_p" ] }, - { - "id": "anthropic/claude-3.5-haiku", - "canonical_slug": "anthropic/claude-3-5-haiku", - "hugging_face_id": null, - "name": "Anthropic: Claude 3.5 Haiku", - "created": 1730678400, - "description": "Claude 3.5 Haiku features offers enhanced capabilities in speed, coding accuracy, and tool use. Engineered to excel in real-time applications, it delivers quick response times that are essential for dynamic tasks such as chat interactions and immediate coding suggestions.\n\nThis makes it highly suitable for environments that demand both speed and precision, such as software development, customer service bots, and data management systems.\n\nThis model is currently pointing to [Claude 3.5 Haiku (2024-10-22)](/anthropic/claude-3-5-haiku-20241022).", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.000004", - "request": "0", - "image": "0", - "web_search": "0.01", - "internal_reasoning": "0", - "input_cache_read": "0.00000008", - "input_cache_write": "0.000001" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 8192, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "stop", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ] - }, { "id": "anthracite-org/magnum-v4-72b", "canonical_slug": "anthracite-org/magnum-v4-72b", @@ -11409,12 +11932,12 @@ ] }, { - "id": "inflection/inflection-3-productivity", - "canonical_slug": "inflection/inflection-3-productivity", + "id": "inflection/inflection-3-pi", + "canonical_slug": "inflection/inflection-3-pi", "hugging_face_id": null, - "name": "Inflection: Inflection 3 Productivity", + "name": "Inflection: Inflection 3 Pi", "created": 1728604800, - "description": "Inflection 3 Productivity is optimized for following instructions. It is better for tasks requiring JSON output or precise adherence to provided guidelines. 
It has access to recent news.\n\nFor emotional intelligence similar to Pi, see [Inflect 3 Pi](/inflection/inflection-3-pi)\n\nSee [Inflection's announcement](https://inflection.ai/blog/enterprise) for more details.", + "description": "Inflection 3 Pi powers Inflection's [Pi](https://pi.ai) chatbot, including backstory, emotional intelligence, productivity, and safety. It has access to recent news, and excels in scenarios like customer support and roleplay.\n\nPi has been trained to mirror your tone and style, if you use more emojis, so will Pi! Try experimenting with various prompts and conversation styles.", "context_length": 8000, "architecture": { "modality": "text->text", @@ -11449,12 +11972,12 @@ ] }, { - "id": "inflection/inflection-3-pi", - "canonical_slug": "inflection/inflection-3-pi", + "id": "inflection/inflection-3-productivity", + "canonical_slug": "inflection/inflection-3-productivity", "hugging_face_id": null, - "name": "Inflection: Inflection 3 Pi", + "name": "Inflection: Inflection 3 Productivity", "created": 1728604800, - "description": "Inflection 3 Pi powers Inflection's [Pi](https://pi.ai) chatbot, including backstory, emotional intelligence, productivity, and safety. It has access to recent news, and excels in scenarios like customer support and roleplay.\n\nPi has been trained to mirror your tone and style, if you use more emojis, so will Pi! Try experimenting with various prompts and conversation styles.", + "description": "Inflection 3 Productivity is optimized for following instructions. It is better for tasks requiring JSON output or precise adherence to provided guidelines. It has access to recent news.\n\nFor emotional intelligence similar to Pi, see [Inflect 3 Pi](/inflection/inflection-3-pi)\n\nSee [Inflection's announcement](https://inflection.ai/blog/enterprise) for more details.", "context_length": 8000, "architecture": { "modality": "text->text", @@ -11538,53 +12061,6 @@ "top_p" ] }, - { - "id": "anthracite-org/magnum-v2-72b", - "canonical_slug": "anthracite-org/magnum-v2-72b", - "hugging_face_id": "anthracite-org/magnum-v2-72b", - "name": "Magnum v2 72B", - "created": 1727654400, - "description": "From the maker of [Goliath](https://openrouter.ai/models/alpindale/goliath-120b), Magnum 72B is the seventh in a family of models designed to achieve the prose quality of the Claude 3 models, notably Opus & Sonnet.\n\nThe model is based on [Qwen2 72B](https://openrouter.ai/models/qwen/qwen-2-72b-instruct) and trained with 55 million tokens of highly curated roleplay (RP) data.", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Qwen", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.000003", - "completion": "0.000003", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "seed", - "stop", - "temperature", - "top_k", - "top_p" - ] - }, { "id": "thedrummer/rocinante-12b", "canonical_slug": "thedrummer/rocinante-12b", @@ -11638,36 +12114,35 @@ ] }, { - "id": "meta-llama/llama-3.2-11b-vision-instruct", - "canonical_slug": "meta-llama/llama-3.2-11b-vision-instruct", - "hugging_face_id": 
"meta-llama/Llama-3.2-11B-Vision-Instruct", - "name": "Meta: Llama 3.2 11B Vision Instruct", - "created": 1727222400, - "description": "Llama 3.2 11B Vision is a multimodal model with 11 billion parameters, designed to handle tasks combining visual and textual data. It excels in tasks such as image captioning and visual question answering, bridging the gap between language generation and visual reasoning. Pre-trained on a massive dataset of image-text pairs, it performs well in complex, high-accuracy image analysis.\n\nIts ability to integrate visual understanding with language processing makes it an ideal solution for industries requiring comprehensive visual-linguistic AI applications, such as content creation, AI-driven customer service, and research.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 131072, + "id": "anthracite-org/magnum-v2-72b", + "canonical_slug": "anthracite-org/magnum-v2-72b", + "hugging_face_id": "anthracite-org/magnum-v2-72b", + "name": "Magnum v2 72B", + "created": 1727654400, + "description": "From the maker of [Goliath](https://openrouter.ai/models/alpindale/goliath-120b), Magnum 72B is the seventh in a family of models designed to achieve the prose quality of the Claude 3 models, notably Opus & Sonnet.\n\nThe model is based on [Qwen2 72B](https://openrouter.ai/models/qwen/qwen-2-72b-instruct) and trained with 55 million tokens of highly curated roleplay (RP) data.", + "context_length": 32768, "architecture": { - "modality": "text+image->text", + "modality": "text->text", "input_modalities": [ - "text", - "image" + "text" ], "output_modalities": [ "text" ], - "tokenizer": "Llama3", - "instruct_type": "llama3" + "tokenizer": "Qwen", + "instruct_type": "chatml" }, "pricing": { - "prompt": "0.000000049", - "completion": "0.000000049", + "prompt": "0.000003", + "completion": "0.000003", "request": "0", - "image": "0.00007948", + "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": 16384, + "context_length": 32768, + "max_completion_tokens": null, "is_moderated": false }, "per_request_limits": null, @@ -11678,13 +12153,10 @@ "min_p", "presence_penalty", "repetition_penalty", - "response_format", "seed", "stop", - "structured_outputs", "temperature", "top_k", - "top_logprobs", "top_p" ] }, @@ -11882,6 +12354,57 @@ "top_p" ] }, + { + "id": "meta-llama/llama-3.2-11b-vision-instruct", + "canonical_slug": "meta-llama/llama-3.2-11b-vision-instruct", + "hugging_face_id": "meta-llama/Llama-3.2-11B-Vision-Instruct", + "name": "Meta: Llama 3.2 11B Vision Instruct", + "created": 1727222400, + "description": "Llama 3.2 11B Vision is a multimodal model with 11 billion parameters, designed to handle tasks combining visual and textual data. It excels in tasks such as image captioning and visual question answering, bridging the gap between language generation and visual reasoning. 
Pre-trained on a massive dataset of image-text pairs, it performs well in complex, high-accuracy image analysis.\n\nIts ability to integrate visual understanding with language processing makes it an ideal solution for industries requiring comprehensive visual-linguistic AI applications, such as content creation, AI-driven customer service, and research.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", + "context_length": 131072, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.000000049", + "completion": "0.000000049", + "request": "0", + "image": "0.00007948", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 16384, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "top_k", + "top_logprobs", + "top_p" + ] + }, { "id": "qwen/qwen-2.5-72b-instruct:free", "canonical_slug": "qwen/qwen-2.5-72b-instruct", @@ -11951,8 +12474,8 @@ "instruct_type": "chatml" }, "pricing": { - "prompt": "0.0000000518308", - "completion": "0.000000207424", + "prompt": "0.00000006521736", + "completion": "0.000000260869536", "request": "0", "image": "0", "web_search": "0", @@ -12034,10 +12557,10 @@ ] }, { - "id": "openai/o1-mini-2024-09-12", - "canonical_slug": "openai/o1-mini-2024-09-12", + "id": "openai/o1-mini", + "canonical_slug": "openai/o1-mini", "hugging_face_id": null, - "name": "OpenAI: o1-mini (2024-09-12)", + "name": "OpenAI: o1-mini", "created": 1726099200, "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", "context_length": 128000, @@ -12073,10 +12596,10 @@ ] }, { - "id": "openai/o1-mini", - "canonical_slug": "openai/o1-mini", + "id": "openai/o1-mini-2024-09-12", + "canonical_slug": "openai/o1-mini-2024-09-12", "hugging_face_id": null, - "name": "OpenAI: o1-mini", + "name": "OpenAI: o1-mini (2024-09-12)", "created": 1726099200, "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. 
Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", "context_length": 128000, @@ -12166,12 +12689,12 @@ ] }, { - "id": "cohere/command-r-08-2024", - "canonical_slug": "cohere/command-r-08-2024", + "id": "cohere/command-r-plus-08-2024", + "canonical_slug": "cohere/command-r-plus-08-2024", "hugging_face_id": null, - "name": "Cohere: Command R (08-2024)", + "name": "Cohere: Command R+ (08-2024)", "created": 1724976000, - "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", "context_length": 128000, "architecture": { "modality": "text->text", @@ -12185,8 +12708,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000015", - "completion": "0.0000006", + "prompt": "0.0000025", + "completion": "0.00001", "request": "0", "image": "0", "web_search": "0", @@ -12213,12 +12736,12 @@ ] }, { - "id": "cohere/command-r-plus-08-2024", - "canonical_slug": "cohere/command-r-plus-08-2024", + "id": "cohere/command-r-08-2024", + "canonical_slug": "cohere/command-r-08-2024", "hugging_face_id": null, - "name": "Cohere: Command R+ (08-2024)", + "name": "Cohere: Command R (08-2024)", "created": 1724976000, - "description": "command-r-plus-08-2024 is an update of the [Command R+](/models/cohere/command-r-plus) with roughly 50% higher throughput and 25% lower latencies as compared to the previous Command R+ version, while keeping the hardware footprint the same.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. 
More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", "context_length": 128000, "architecture": { "modality": "text->text", @@ -12232,8 +12755,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000025", - "completion": "0.00001", + "prompt": "0.00000015", + "completion": "0.0000006", "request": "0", "image": "0", "web_search": "0", @@ -12421,8 +12944,8 @@ "instruct_type": "chatml" }, "pricing": { - "prompt": "0.0000001", - "completion": "0.00000028", + "prompt": "0.00000012", + "completion": "0.0000003", "request": "0", "image": "0", "web_search": "0", @@ -12474,7 +12997,7 @@ "instruct_type": "chatml" }, "pricing": { - "prompt": "0.0000007", + "prompt": "0.0000008", "completion": "0.0000008", "request": "0", "image": "0", @@ -12483,7 +13006,7 @@ }, "top_provider": { "context_length": 131072, - "max_completion_tokens": 16384, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null, @@ -12704,6 +13227,59 @@ "top_p" ] }, + { + "id": "meta-llama/llama-3.1-8b-instruct", + "canonical_slug": "meta-llama/llama-3.1-8b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", + "name": "Meta: Llama 3.1 8B Instruct", + "created": 1721692800, + "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.000000015", + "completion": "0.00000002", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 16384, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_logprobs", + "top_p" + ] + }, { "id": "meta-llama/llama-3.1-405b-instruct:free", "canonical_slug": "meta-llama/llama-3.1-405b-instruct", @@ -12802,59 +13378,6 @@ "top_p" ] }, - { - "id": "meta-llama/llama-3.1-8b-instruct", - "canonical_slug": "meta-llama/llama-3.1-8b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", - "name": "Meta: Llama 3.1 8B Instruct", - "created": 1721692800, - "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. 
This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.000000015", - "completion": "0.00000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": 16384, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "logprobs", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_logprobs", - "top_p" - ] - }, { "id": "meta-llama/llama-3.1-70b-instruct", "canonical_slug": "meta-llama/llama-3.1-70b-instruct", @@ -12977,8 +13500,8 @@ "instruct_type": "mistral" }, "pricing": { - "prompt": "0.00000001", - "completion": "0.0000000400032", + "prompt": "0.000000017934774", + "completion": "0.0000000717391224", "request": "0", "image": "0", "web_search": "0", @@ -13231,8 +13754,8 @@ "instruct_type": "gemma" }, "pricing": { - "prompt": "0.00000001", - "completion": "0.0000000100008", + "prompt": "0.00000002", + "completion": "0.0000000358695612", "request": "0", "image": "0", "web_search": "0", @@ -13453,6 +13976,56 @@ "top_p" ] }, + { + "id": "mistralai/mistral-7b-instruct-v0.3", + "canonical_slug": "mistralai/mistral-7b-instruct-v0.3", + "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3", + "name": "Mistral: Mistral 7B Instruct v0.3", + "created": 1716768000, + "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\nAn improved version of [Mistral 7B Instruct v0.2](/models/mistralai/mistral-7b-instruct-v0.2), with the following changes:\n\n- Extended vocabulary to 32768\n- Supports v3 Tokenizer\n- Supports function calling\n\nNOTE: Support for function calling depends on the provider.", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": "mistral" + }, + "pricing": { + "prompt": "0.000000028", + "completion": "0.000000054", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": 16384, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "max_tokens", + "min_p", + "presence_penalty", + "repetition_penalty", + "response_format", + "seed", + "stop", + "temperature", + "tool_choice", + "tools", + "top_k", + "top_p" + ] + }, { "id": "mistralai/mistral-7b-instruct:free", "canonical_slug": "mistralai/mistral-7b-instruct", @@ -13552,56 +14125,6 @@ "top_p" ] }, - { - "id": "mistralai/mistral-7b-instruct-v0.3", - "canonical_slug": "mistralai/mistral-7b-instruct-v0.3", - "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3", - "name": "Mistral: 
Mistral 7B Instruct v0.3", - "created": 1716768000, - "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\nAn improved version of [Mistral 7B Instruct v0.2](/models/mistralai/mistral-7b-instruct-v0.2), with the following changes:\n\n- Extended vocabulary to 32768\n- Supports v3 Tokenizer\n- Supports function calling\n\nNOTE: Support for function calling depends on the provider.", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": "mistral" - }, - "pricing": { - "prompt": "0.000000028", - "completion": "0.000000054", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": 16384, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "response_format", - "seed", - "stop", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ] - }, { "id": "microsoft/phi-3-mini-128k-instruct", "canonical_slug": "microsoft/phi-3-mini-128k-instruct", @@ -14428,6 +14951,52 @@ "top_p" ] }, + { + "id": "cohere/command", + "canonical_slug": "cohere/command", + "hugging_face_id": null, + "name": "Cohere: Command", + "created": 1710374400, + "description": "Command is an instruction-following conversational model that performs language tasks with high quality, more reliably and with a longer context than our base generative models.\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "context_length": 4096, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Cohere", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000001", + "completion": "0.000002", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 4096, + "max_completion_tokens": 4000, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "top_k", + "top_p" + ] + }, { "id": "cohere/command-r", "canonical_slug": "cohere/command-r", @@ -14475,52 +15044,6 @@ "top_p" ] }, - { - "id": "cohere/command", - "canonical_slug": "cohere/command", - "hugging_face_id": null, - "name": "Cohere: Command", - "created": 1710374400, - "description": "Command is an instruction-following conversational model that performs language tasks with high quality, more reliably and with a longer context than our base generative models.\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", - "context_length": 4096, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Cohere", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000001", - "completion": "0.000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - 
"top_provider": { - "context_length": 4096, - "max_completion_tokens": 4000, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "top_k", - "top_p" - ] - }, { "id": "anthropic/claude-3-haiku", "canonical_slug": "anthropic/claude-3-haiku", @@ -14808,12 +15331,12 @@ ] }, { - "id": "mistralai/mistral-small", - "canonical_slug": "mistralai/mistral-small", + "id": "mistralai/mistral-tiny", + "canonical_slug": "mistralai/mistral-tiny", "hugging_face_id": null, - "name": "Mistral Small", + "name": "Mistral Tiny", "created": 1704844800, - "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.", + "description": "Note: This model is being deprecated. Recommended replacement is the newer [Ministral 8B](/mistral/ministral-8b)\n\nThis model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", "context_length": 32768, "architecture": { "modality": "text->text", @@ -14827,8 +15350,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000002", - "completion": "0.0000006", + "prompt": "0.00000025", + "completion": "0.00000025", "request": "0", "image": "0", "web_search": "0", @@ -14855,12 +15378,12 @@ ] }, { - "id": "mistralai/mistral-tiny", - "canonical_slug": "mistralai/mistral-tiny", + "id": "mistralai/mistral-small", + "canonical_slug": "mistralai/mistral-small", "hugging_face_id": null, - "name": "Mistral Tiny", + "name": "Mistral Small", "created": 1704844800, - "description": "Note: This model is being deprecated. Recommended replacement is the newer [Ministral 8B](/mistral/ministral-8b)\n\nThis model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", + "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. 
It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.", "context_length": 32768, "architecture": { "modality": "text->text", @@ -14874,8 +15397,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000025", - "completion": "0.00000025", + "prompt": "0.0000002", + "completion": "0.0000006", "request": "0", "image": "0", "web_search": "0", @@ -15132,55 +15655,6 @@ "top_p" ] }, - { - "id": "mistralai/mistral-7b-instruct-v0.1", - "canonical_slug": "mistralai/mistral-7b-instruct-v0.1", - "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.1", - "name": "Mistral: Mistral 7B Instruct v0.1", - "created": 1695859200, - "description": "A 7.3B parameter model that outperforms Llama 2 13B on all benchmarks, with optimizations for speed and context length.", - "context_length": 2824, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": "mistral" - }, - "pricing": { - "prompt": "0.00000011", - "completion": "0.00000019", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 2824, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "max_tokens", - "min_p", - "presence_penalty", - "repetition_penalty", - "seed", - "stop", - "temperature", - "tool_choice", - "tools", - "top_k", - "top_p" - ] - }, { "id": "openai/gpt-3.5-turbo-instruct", "canonical_slug": "openai/gpt-3.5-turbo-instruct", @@ -15230,13 +15704,13 @@ ] }, { - "id": "pygmalionai/mythalion-13b", - "canonical_slug": "pygmalionai/mythalion-13b", - "hugging_face_id": "PygmalionAI/mythalion-13b", - "name": "Pygmalion: Mythalion 13B", - "created": 1693612800, - "description": "A blend of the new Pygmalion-13b and MythoMax. #merge", - "context_length": 4096, + "id": "mistralai/mistral-7b-instruct-v0.1", + "canonical_slug": "mistralai/mistral-7b-instruct-v0.1", + "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.1", + "name": "Mistral: Mistral 7B Instruct v0.1", + "created": 1695859200, + "description": "A 7.3B parameter model that outperforms Llama 2 13B on all benchmarks, with optimizations for speed and context length.", + "context_length": 2824, "architecture": { "modality": "text->text", "input_modalities": [ @@ -15245,20 +15719,20 @@ "output_modalities": [ "text" ], - "tokenizer": "Llama2", - "instruct_type": "alpaca" + "tokenizer": "Mistral", + "instruct_type": "mistral" }, "pricing": { - "prompt": "0.0000007", - "completion": "0.0000011", + "prompt": "0.00000011", + "completion": "0.00000019", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 4096, - "max_completion_tokens": 4096, + "context_length": 2824, + "max_completion_tokens": null, "is_moderated": false }, "per_request_limits": null, @@ -15272,6 +15746,8 @@ "seed", "stop", "temperature", + "tool_choice", + "tools", "top_k", "top_p" ] @@ -15474,56 +15950,6 @@ "top_p" ] }, - { - "id": "openai/gpt-4-0314", - "canonical_slug": "openai/gpt-4-0314", - "hugging_face_id": null, - "name": "OpenAI: GPT-4 (older v0314)", - "created": 1685232000, - "description": "GPT-4-0314 is the first version of GPT-4 released, with a context length of 8,192 tokens, and was supported until June 14. 
Training data: up to Sep 2021.", - "context_length": 8191, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "GPT", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00003", - "completion": "0.00006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8191, - "max_completion_tokens": 4096, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "frequency_penalty", - "logit_bias", - "logprobs", - "max_tokens", - "presence_penalty", - "response_format", - "seed", - "stop", - "structured_outputs", - "temperature", - "tool_choice", - "tools", - "top_logprobs", - "top_p" - ] - }, { "id": "openai/gpt-3.5-turbo", "canonical_slug": "openai/gpt-3.5-turbo", @@ -15574,6 +16000,56 @@ "top_p" ] }, + { + "id": "openai/gpt-4-0314", + "canonical_slug": "openai/gpt-4-0314", + "hugging_face_id": null, + "name": "OpenAI: GPT-4 (older v0314)", + "created": 1685232000, + "description": "GPT-4-0314 is the first version of GPT-4 released, with a context length of 8,192 tokens, and was supported until June 14. Training data: up to Sep 2021.", + "context_length": 8191, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00003", + "completion": "0.00006", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 8191, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "presence_penalty", + "response_format", + "seed", + "stop", + "structured_outputs", + "temperature", + "tool_choice", + "tools", + "top_logprobs", + "top_p" + ] + }, { "id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", diff --git a/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts b/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts index d2a234ea..dbd471f2 100644 --- a/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts +++ b/packages/kbot/dist-in/models/cache/openrouter-models-free.d.ts @@ -1,4 +1,7 @@ export declare enum E_OPENROUTER_MODEL_FREE { + MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free", + MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha", + MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha", MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free", MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free", MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free", @@ -30,7 +33,6 @@ export declare enum E_OPENROUTER_MODEL_FREE { MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free", MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free", MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free", - MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25", MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = 
"mistralai/mistral-small-3.1-24b-instruct:free", diff --git a/packages/kbot/dist-in/models/cache/openrouter-models-free.js b/packages/kbot/dist-in/models/cache/openrouter-models-free.js index e48745be..5ae338cd 100644 --- a/packages/kbot/dist-in/models/cache/openrouter-models-free.js +++ b/packages/kbot/dist-in/models/cache/openrouter-models-free.js @@ -1,5 +1,8 @@ export var E_OPENROUTER_MODEL_FREE; (function (E_OPENROUTER_MODEL_FREE) { + E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE"] = "nvidia/nemotron-nano-9b-v2:free"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA"] = "openrouter/sonoma-dusk-alpha"; + E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA"] = "openrouter/sonoma-sky-alpha"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE"] = "deepseek/deepseek-chat-v3.1:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_120B_FREE"] = "openai/gpt-oss-120b:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_20B_FREE"] = "openai/gpt-oss-20b:free"; @@ -31,7 +34,6 @@ export var E_OPENROUTER_MODEL_FREE; E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free"; - E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25"] = "google/gemini-2.5-pro-exp-03-25"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; @@ -58,4 +60,4 @@ export var E_OPENROUTER_MODEL_FREE; E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free"; E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; })(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {})); -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkEwRFg7QUExREQsV0FBWSx1QkFBdUI7SUFDakMsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUseUdBQThFLENBQUE7SUFDOUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQTFEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBMERsQyJ9 \ No newline at end of file +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkE0RFg7QUE1REQsV0FBWSx1QkFBdUI7SUFDakMseUdBQThFLENBQUE7SUFDOUUsbUdBQXdFLENBQUE7SUFDeEUsaUdBQXNFLENBQUE7SUFDdEUsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsbUlBQXdHLENBQUE7SUFDeEcsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsMkZBQWdFLENBQUE7SUFDaEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsK0hBQW9HLENBQUE7SUFDcEcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQTVEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBNERsQ
yJ9 \ No newline at end of file diff --git a/packages/kbot/dist-in/models/cache/openrouter-models.d.ts b/packages/kbot/dist-in/models/cache/openrouter-models.d.ts index 6ceeef57..1ba4d905 100644 --- a/packages/kbot/dist-in/models/cache/openrouter-models.d.ts +++ b/packages/kbot/dist-in/models/cache/openrouter-models.d.ts @@ -1,6 +1,19 @@ export declare enum E_OPENROUTER_MODEL { + MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking", + MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct", + MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat", + MODEL_QWEN_QWEN_PLUS_2025_07_28 = "qwen/qwen-plus-2025-07-28", + MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking", + MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free", + MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2", + MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha", + MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha", + MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max", + MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905", + MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct", MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe", MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b", + MODEL_STEPFUN_AI_STEP3 = "stepfun-ai/step3", MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507 = "qwen/qwen3-30b-a3b-thinking-2507", MODEL_X_AI_GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1", MODEL_NOUSRESEARCH_HERMES_4_70B = "nousresearch/hermes-4-70b", @@ -128,14 +141,13 @@ export declare enum E_OPENROUTER_MODEL { MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking", MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta", MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta", - MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1 = "nvidia/llama-3.3-nemotron-super-49b-v1", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1", MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free", MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick", MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free", MODEL_META_LLAMA_LLAMA_4_SCOUT = "meta-llama/llama-4-scout", - MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25", + MODEL_ALLENAI_MOLMO_7B_D = "allenai/molmo-7b-d", MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT = "qwen/qwen2.5-vl-32b-instruct", MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", @@ -143,6 +155,7 @@ export declare enum E_OPENROUTER_MODEL { MODEL_OPENAI_O1_PRO = "openai/o1-pro", MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct", + MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct", MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it", MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", @@ -219,38 +232,36 @@ export declare enum E_OPENROUTER_MODEL { MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411", MODEL_MISTRALAI_MISTRAL_LARGE_2407 = 
"mistralai/mistral-large-2407", MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411", - MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta", - MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b", MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct", MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", - MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", - MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", + MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", - MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", - MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", + MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", - MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_OPENAI_O1_MINI = "openai/o1-mini", + MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", - MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", + MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", @@ -260,9 +271,9 @@ export declare enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free", MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", - 
MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", @@ -275,9 +286,9 @@ export declare enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", @@ -295,29 +306,28 @@ export declare enum E_OPENROUTER_MODEL { MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024", MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", - MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_COHERE_COMMAND = "cohere/command", + MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", - MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", + MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", MODEL_OPENROUTER_AUTO = "openrouter/auto", MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct", - MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k", MODEL_MANCER_WEAVER = "mancer/weaver", MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b", MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b", - MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314", MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo", + MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314", MODEL_OPENAI_GPT_4 = "openai/gpt-4" } diff --git a/packages/kbot/dist-in/models/cache/openrouter-models.js b/packages/kbot/dist-in/models/cache/openrouter-models.js index 22090836..694a422f 100644 --- a/packages/kbot/dist-in/models/cache/openrouter-models.js +++ b/packages/kbot/dist-in/models/cache/openrouter-models.js @@ -1,7 +1,20 @@ export var E_OPENROUTER_MODEL; (function (E_OPENROUTER_MODEL) { + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING"] = 
"qwen/qwen3-next-80b-a3b-thinking"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT"] = "qwen/qwen3-next-80b-a3b-instruct"; + E_OPENROUTER_MODEL["MODEL_MEITUAN_LONGCAT_FLASH_CHAT"] = "meituan/longcat-flash-chat"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS_2025_07_28"] = "qwen/qwen-plus-2025-07-28"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING"] = "qwen/qwen-plus-2025-07-28:thinking"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE"] = "nvidia/nemotron-nano-9b-v2:free"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_NEMOTRON_NANO_9B_V2"] = "nvidia/nemotron-nano-9b-v2"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_SONOMA_DUSK_ALPHA"] = "openrouter/sonoma-dusk-alpha"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_SONOMA_SKY_ALPHA"] = "openrouter/sonoma-sky-alpha"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_MAX"] = "qwen/qwen3-max"; + E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_KIMI_K2_0905"] = "moonshotai/kimi-k2-0905"; + E_OPENROUTER_MODEL["MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT"] = "bytedance/seed-oss-36b-instruct"; E_OPENROUTER_MODEL["MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE"] = "deepcogito/cogito-v2-preview-llama-109b-moe"; E_OPENROUTER_MODEL["MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B"] = "deepcogito/cogito-v2-preview-deepseek-671b"; + E_OPENROUTER_MODEL["MODEL_STEPFUN_AI_STEP3"] = "stepfun-ai/step3"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507"] = "qwen/qwen3-30b-a3b-thinking-2507"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_CODE_FAST_1"] = "x-ai/grok-code-fast-1"; E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_4_70B"] = "nousresearch/hermes-4-70b"; @@ -129,14 +142,13 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING"] = "moonshotai/kimi-vl-a3b-thinking"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_3_MINI_BETA"] = "x-ai/grok-3-mini-beta"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_3_BETA"] = "x-ai/grok-3-beta"; - E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1"] = "nvidia/llama-3.3-nemotron-super-49b-v1"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_MAVERICK"] = "meta-llama/llama-4-maverick"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_SCOUT"] = "meta-llama/llama-4-scout"; - E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25"] = "google/gemini-2.5-pro-exp-03-25"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_MOLMO_7B_D"] = "allenai/molmo-7b-d"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT"] = "qwen/qwen2.5-vl-32b-instruct"; E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; @@ -144,6 +156,7 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PRO"] = "openai/o1-pro"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT"] = "mistralai/mistral-small-3.1-24b-instruct"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT"] = "allenai/olmo-2-0325-32b-instruct"; 
E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT"] = "google/gemma-3-4b-it"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; @@ -220,38 +233,36 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2411"] = "mistralai/mistral-large-2411"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2407"] = "mistralai/mistral-large-2407"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_LARGE_2411"] = "mistralai/pixtral-large-2411"; - E_OPENROUTER_MODEL["MODEL_X_AI_GROK_VISION_BETA"] = "x-ai/grok-vision-beta"; - E_OPENROUTER_MODEL["MODEL_INFERMATIC_MN_INFEROR_12B"] = "infermatic/mn-inferor-12b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT"] = "qwen/qwen-2.5-coder-32b-instruct"; E_OPENROUTER_MODEL["MODEL_RAIFLE_SORCERERLM_8X22B"] = "raifle/sorcererlm-8x22b"; E_OPENROUTER_MODEL["MODEL_THEDRUMMER_UNSLOPNEMO_12B"] = "thedrummer/unslopnemo-12b"; - E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU"] = "anthropic/claude-3.5-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B"] = "anthracite-org/magnum-v4-72b"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET"] = "anthropic/claude-3.5-sonnet"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_3B"] = "mistralai/ministral-3b"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_8B"] = "mistralai/ministral-8b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_7B_INSTRUCT"] = "qwen/qwen-2.5-7b-instruct"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT"] = "nvidia/llama-3.1-nemotron-70b-instruct"; - E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PI"] = "inflection/inflection-3-pi"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B"] = "google/gemini-flash-1.5-8b"; - E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ROCINANTE_12B"] = "thedrummer/rocinante-12b"; - E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-11b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-90b-vision-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT"] = "meta-llama/llama-3.2-1b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT"] = "meta-llama/llama-3.2-3b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-11b-vision-instruct"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT"] = "qwen/qwen-2.5-72b-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B"] = "neversleep/llama-3.1-lumimaid-8b"; - 
E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI"] = "openai/o1-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_12B"] = "mistralai/pixtral-12b"; - E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_08_2024"] = "cohere/command-r-plus-08-2024"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_EURYALE_70B"] = "sao10k/l3.1-euryale-70b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT"] = "qwen/qwen-2.5-vl-7b-instruct"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT"] = "microsoft/phi-3.5-mini-128k-instruct"; @@ -261,9 +272,9 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_LUNARIS_8B"] = "sao10k/l3-lunaris-8b"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_08_06"] = "openai/gpt-4o-2024-08-06"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B"] = "meta-llama/llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-405b-instruct:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT"] = "meta-llama/llama-3.1-405b-instruct"; - E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT"] = "meta-llama/llama-3.1-70b-instruct"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO"] = "mistralai/mistral-nemo"; @@ -276,9 +287,9 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_EURYALE_70B"] = "sao10k/l3-euryale-70b"; E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B"] = "cognitivecomputations/dolphin-mixtral-8x22b"; E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B"] = "nousresearch/hermes-2-pro-llama-3-8b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = "mistralai/mistral-7b-instruct-v0.3"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT"] = "mistralai/mistral-7b-instruct"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = "mistralai/mistral-7b-instruct-v0.3"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT"] = "microsoft/phi-3-mini-128k-instruct"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT"] = "microsoft/phi-3-medium-128k-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B"] = "neversleep/llama-3-lumimaid-70b"; @@ -296,30 +307,29 @@ export var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS"] = "cohere/command-r-plus"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_04_2024"] = "cohere/command-r-plus-04-2024"; E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B"] = "sophosympatheia/midnight-rose-70b"; - E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND"] = "cohere/command"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU"] = "anthropic/claude-3-haiku"; 
E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS"] = "anthropic/claude-3-opus"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_03_2024"] = "cohere/command-r-03-2024"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE"] = "mistralai/mistral-large"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO_PREVIEW"] = "openai/gpt-4-turbo-preview"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0613"] = "openai/gpt-3.5-turbo-0613"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_TINY"] = "mistralai/mistral-tiny"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT"] = "mistralai/mixtral-8x7b-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_NOROMAID_20B"] = "neversleep/noromaid-20b"; E_OPENROUTER_MODEL["MODEL_ALPINDALE_GOLIATH_120B"] = "alpindale/goliath-120b"; E_OPENROUTER_MODEL["MODEL_OPENROUTER_AUTO"] = "openrouter/auto"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_1106_PREVIEW"] = "openai/gpt-4-1106-preview"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT"] = "openai/gpt-3.5-turbo-instruct"; - E_OPENROUTER_MODEL["MODEL_PYGMALIONAI_MYTHALION_13B"] = "pygmalionai/mythalion-13b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_16K"] = "openai/gpt-3.5-turbo-16k"; E_OPENROUTER_MODEL["MODEL_MANCER_WEAVER"] = "mancer/weaver"; E_OPENROUTER_MODEL["MODEL_UNDI95_REMM_SLERP_L2_13B"] = "undi95/remm-slerp-l2-13b"; E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B"] = "gryphe/mythomax-l2-13b"; - E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO"] = "openai/gpt-3.5-turbo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4"] = "openai/gpt-4"; })(E_OPENROUTER_MODEL || (E_OPENROUTER_MODEL = {})); -//# 
sourceMappingURL=data:application/json;base64,{"version":3,"file":"openrouter-models.js","sourceRoot":"","sources":["../../../src/models/cache/openrouter-models.ts"],"names":[],"mappings":"AAAA,MAAM,CAAN,IAAY,kBAkUX;AAlUD,WAAY,kBAAkB;IAC5B,uHAAiG,CAAA;IACjG,qHAA+F,CAAA;IAC/F,iGAA2E,CAAA;IAC3E,2EAAqD,CAAA;IACrD,mFAA6D,CAAA;IAC7D,qFAA+D,CAAA;IAC/D,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,uFAAiE,CAAA;IACjE,uFAAiE,CAAA;IACjE,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,qFAA+D,CAAA;IAC/D,2DAAqC,CAAA;IACrC,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,mEAA6C,CAAA;IAC7C,yDAAmC,CAAA;IACnC,mEAA6C,CAAA;IAC7C,mEAA6C,CAAA;IAC7C,iFAA2D,CAAA;IAC3D,uEAAiD,CAAA;IACjD,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,mFAA6D,CAAA;IAC7D,iFAA2D,CAAA;IAC3D,mGAA6E,CAAA;IAC7E,iGAA2E,CAAA;IAC3E,yDAAmC,CAAA;IACnC,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,qGAA+E,CAAA;IAC/E,6DAAuC,CAAA;IACvC,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,iFAA2D,CAAA;IAC3D,yFAAmE,CAAA;IACnE,mFAA6D,CAAA;IAC7D,qEAA+C,CAAA;IAC/C,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,qFAA+D,CAAA;IAC/D,mFAA6D,CAAA;IAC7D,iFAA2D,CAAA;IAC3D,2JAAqI,CAAA;IACrI,uDAAiC,CAAA;IACjC,uFAAiE,CAAA;IACjE,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,qGAA+E,CAAA;IAC/E,yEAAmD,CAAA;IACnD,uEAAiD,CAAA;IACjD,yFAAmE,CAAA;IACnE,mFAA6D,CAAA;IAC7D,qFAA+D,CAAA;IAC/D,mEAA6C,CAAA;IAC7C,2HAAqG,CAAA;IACrG,iHAA2F,CAAA;IAC3F,qEAA+C,CAAA;IAC/C,qHAA+F,CAAA;IAC/F,+EAAyD,CAAA;IACzD,2EAAqD,CAAA;IACrD,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,2DAAqC,CAAA;IACrC,iEAA2C,CAAA;IAC3C,uDAAiC,CAAA;IACjC,6FAAuE,CAAA;IACvE,+FAAyE,CAAA;IACzE,iHAA2F,CAAA;IAC3F,2FAAqE,CAAA;IACrE,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,6FAAuE,CAAA;IACvE,mFAA6D,CAAA;IAC7D,+EAAyD,CAAA;IACzD,mFAA6D,CAAA;IAC7D,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,mEAA6C,CAAA;IAC7C,2GAAqF,CAAA;IACrF,2HAAqG,CAAA;IACrG,qFAA+D,CAAA;IAC/D,uGAAiF,CAAA;IACjF,qEAA+C,CAAA;IAC/C,qFAA+D,CAAA;IAC/D,+EAAyD,CAAA;IACzD,yEAAmD,CAAA;IACnD,6FAAuE,CAAA;IACvE,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,uFAAiE,CAAA;IACjE,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,qEAA+C,CAAA;IAC/C,2DAAqC,CAAA;IACrC,uEAAiD,CAAA;IACjD,6DAAuC,CAAA;IACvC,6DAAuC,CAAA;IACvC,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,iFAA2D,CAAA;IAC3D,uEAAiD,CAAA;IACjD,iEAA2C,CAAA;IAC3C,+DAAyC,CAAA;IACzC,uEAAiD,CAAA;IACjD,mDAA6B,CAAA;IAC7B,6DAAuC,CAAA;IACvC,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,6DAAuC,CAAA;IACvC,uEAAiD,CAAA;IACjD,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,mHAA6F,CAAA;IAC7F,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,yGAAmF,CAAA;IACnF,+FAAyE,CAAA;IACzE,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,6GAAuF,CAAA;IACvF,yHAAmG,CAAA;IACnG,+GAAyF,CAAA;IACzF,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,2FAAqE,CAAA;IACrE,iFAA2D,CAAA;IAC3D,+FAAyE,CAAA;IACzE,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,2DAAqC,CAAA;IACrC,2HAAqG,CAAA;IACrG,iHAA2F,CAAA;IAC3F,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,iFAA2D,CAAA;IAC3D,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,2FAAqE,CAAA;IACrE,mFAA6D,CAAA;IAC7D,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,yEAAmD,CAAA;IACnD,6FAAuE,CAAA;IACvE,mEAA6C,CAAA;IAC7C,yDAAmC,CAAA;IACnC,mIAA6G,CAAA;IAC7G,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,yGAAmF,CAAA;IACnF,qEAA+C,CAAA;IAC/C,6EAAuD,CAAA;IACvD,yIAAmH,CAAA;IACnH,+HAAyG,CAAA;IACzG,mIAA6G,CAAA;IAC7G,yHAAmG,CAAA;IACnG,uFAAiE,CAAA;IACjE,uEAAiD,CAAA;IACjD,2GAAqF,CAAA;IACrF,uFAAiE,CAAA;IACjE,mEAA6C,CAAA;IAC7C,qEAA+C,CAAA;IAC/C,+EAAyD,CAAA;IACzD,6FAAuE,CAAA;IACvE,iEAA2C,CAAA;IAC3C,+DAAyC,CAAA;IACzC,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,6DA
AuC,CAAA;IACvC,2DAAqC,CAAA;IACrC,6DAAuC,CAAA;IACvC,6HAAuG,CAAA;IACvG,mHAA6F,CAAA;IAC7F,2GAAqF,CAAA;IACrF,qHAA+F,CAAA;IAC/F,2GAAqF,CAAA;IACrF,qFAA+D,CAAA;IAC/D,iEAA2C,CAAA;IAC3C,2DAAqC,CAAA;IACrC,2DAAqC,CAAA;IACrC,uHAAiG,CAAA;IACjG,6GAAuF,CAAA;IACvF,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,qEAA+C,CAAA;IAC/C,iFAA2D,CAAA;IAC3D,+DAAyC,CAAA;IACzC,6EAAuD,CAAA;IACvD,+EAAyD,CAAA;IACzD,mDAA6B,CAAA;IAC7B,+EAAyD,CAAA;IACzD,iEAA2C,CAAA;IAC3C,qFAA+D,CAAA;IAC/D,iGAA2E,CAAA;IAC3E,6GAAuF,CAAA;IACvF,mGAA6E,CAAA;IAC7E,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,qEAA+C,CAAA;IAC/C,yEAAmD,CAAA;IACnD,iFAA2D,CAAA;IAC3D,yFAAmE,CAAA;IACnE,yFAAmE,CAAA;IACnE,yFAAmE,CAAA;IACnE,2EAAqD,CAAA;IACrD,mFAA6D,CAAA;IAC7D,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,+EAAyD,CAAA;IACzD,mFAA6D,CAAA;IAC7D,uGAAiF,CAAA;IACjF,qFAA+D,CAAA;IAC/D,yFAAmE,CAAA;IACnE,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,6EAAuD,CAAA;IACvD,mFAA6D,CAAA;IAC7D,6GAAuF,CAAA;IACvF,yGAAmF,CAAA;IACnF,qFAA+D,CAAA;IAC/D,qFAA+D,CAAA;IAC/D,yFAAmE,CAAA;IACnE,iFAA2D,CAAA;IAC3D,iHAA2F,CAAA;IAC3F,iHAA2F,CAAA;IAC3F,iGAA2E,CAAA;IAC3E,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,+FAAyE,CAAA;IACzE,qFAA+D,CAAA;IAC/D,iGAA2E,CAAA;IAC3E,mFAA6D,CAAA;IAC7D,6DAAuC,CAAA;IACvC,2EAAqD,CAAA;IACrD,iFAA2D,CAAA;IAC3D,2FAAqE,CAAA;IACrE,+EAAyD,CAAA;IACzD,yFAAmE,CAAA;IACnE,yGAAmF,CAAA;IACnF,uGAAiF,CAAA;IACjF,yGAAmF,CAAA;IACnF,iFAA2D,CAAA;IAC3D,yEAAmD,CAAA;IACnD,iFAA2D,CAAA;IAC3D,mFAA6D,CAAA;IAC7D,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,iGAA2E,CAAA;IAC3E,mGAA6E,CAAA;IAC7E,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,2FAAqE,CAAA;IACrE,qEAA+C,CAAA;IAC/C,2EAAqD,CAAA;IACrD,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,yGAAmF,CAAA;IACnF,2EAAqD,CAAA;IACrD,uHAAiG,CAAA;IACjG,yGAAmF,CAAA;IACnF,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,qGAA+E,CAAA;IAC/E,qGAA+E,CAAA;IAC/E,yGAAmF,CAAA;IACnF,+FAAyE,CAAA;IACzE,+EAAyD,CAAA;IACzD,iFAA2D,CAAA;IAC3D,2DAAqC,CAAA;IACrC,6EAAuD,CAAA;IACvD,uFAAiE,CAAA;IACjE,+FAAyE,CAAA;IACzE,6FAAuE,CAAA;IACvE,iGAA2E,CAAA;IAC3E,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,qEAA+C,CAAA;IAC/C,2EAAqD,CAAA;IACrD,2FAAqE,CAAA;IACrE,mGAA6E,CAAA;IAC7E,iEAA2C,CAAA;IAC3C,6DAAuC,CAAA;IACvC,iFAA2D,CAAA;IAC3D,+EAAyD,CAAA;IACzD,iFAA2D,CAAA;IAC3D,+EAAyD,CAAA;IACzD,qFAA+D,CAAA;IAC/D,mFAA6D,CAAA;IAC7D,+EAAyD,CAAA;IACzD,6EAAuD,CAAA;IACvD,+FAAyE,CAAA;IACzE,+EAAyD,CAAA;IACzD,6EAAuD,CAAA;IACvD,+DAAyC,CAAA;IACzC,mFAA6D,CAAA;IAC7D,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,mFAA6D,CAAA;IAC7D,iFAA2D,CAAA;IAC3D,2DAAqC,CAAA;IACrC,iFAA2D,CAAA;IAC3D,6EAAuD,CAAA;IACvD,mEAA6C,CAAA;IAC7C,yEAAmD,CAAA;IACnD,yDAAmC,CAAA;AACrC,CAAC,EAlUW,kBAAkB,KAAlB,kBAAkB,QAkU7B"} \ No newline at end of file +//# 
sourceMappingURL=data:application/json;base64,{"version":3,"file":"openrouter-models.js","sourceRoot":"","sources":["../../../src/models/cache/openrouter-models.ts"],"names":[],"mappings":"AAAA,MAAM,CAAN,IAAY,kBA4UX;AA5UD,WAAY,kBAAkB;IAC5B,iGAA2E,CAAA;IAC3E,iGAA2E,CAAA;IAC3E,qFAA+D,CAAA;IAC/D,mFAA6D,CAAA;IAC7D,qGAA+E,CAAA;IAC/E,+FAAyE,CAAA;IACzE,qFAA+D,CAAA;IAC/D,yFAAmE,CAAA;IACnE,uFAAiE,CAAA;IACjE,6DAAuC,CAAA;IACvC,+EAAyD,CAAA;IACzD,+FAAyE,CAAA;IACzE,uHAAiG,CAAA;IACjG,qHAA+F,CAAA;IAC/F,iEAA2C,CAAA;IAC3C,iGAA2E,CAAA;IAC3E,2EAAqD,CAAA;IACrD,mFAA6D,CAAA;IAC7D,qFAA+D,CAAA;IAC/D,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,uFAAiE,CAAA;IACjE,uFAAiE,CAAA;IACjE,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,qFAA+D,CAAA;IAC/D,2DAAqC,CAAA;IACrC,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,mEAA6C,CAAA;IAC7C,yDAAmC,CAAA;IACnC,mEAA6C,CAAA;IAC7C,mEAA6C,CAAA;IAC7C,iFAA2D,CAAA;IAC3D,uEAAiD,CAAA;IACjD,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,mFAA6D,CAAA;IAC7D,iFAA2D,CAAA;IAC3D,mGAA6E,CAAA;IAC7E,iGAA2E,CAAA;IAC3E,yDAAmC,CAAA;IACnC,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,qGAA+E,CAAA;IAC/E,6DAAuC,CAAA;IACvC,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,iFAA2D,CAAA;IAC3D,yFAAmE,CAAA;IACnE,mFAA6D,CAAA;IAC7D,qEAA+C,CAAA;IAC/C,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,qFAA+D,CAAA;IAC/D,mFAA6D,CAAA;IAC7D,iFAA2D,CAAA;IAC3D,2JAAqI,CAAA;IACrI,uDAAiC,CAAA;IACjC,uFAAiE,CAAA;IACjE,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,qGAA+E,CAAA;IAC/E,yEAAmD,CAAA;IACnD,uEAAiD,CAAA;IACjD,yFAAmE,CAAA;IACnE,mFAA6D,CAAA;IAC7D,qFAA+D,CAAA;IAC/D,mEAA6C,CAAA;IAC7C,2HAAqG,CAAA;IACrG,iHAA2F,CAAA;IAC3F,qEAA+C,CAAA;IAC/C,qHAA+F,CAAA;IAC/F,+EAAyD,CAAA;IACzD,2EAAqD,CAAA;IACrD,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,2DAAqC,CAAA;IACrC,iEAA2C,CAAA;IAC3C,uDAAiC,CAAA;IACjC,6FAAuE,CAAA;IACvE,+FAAyE,CAAA;IACzE,iHAA2F,CAAA;IAC3F,2FAAqE,CAAA;IACrE,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,6FAAuE,CAAA;IACvE,mFAA6D,CAAA;IAC7D,+EAAyD,CAAA;IACzD,mFAA6D,CAAA;IAC7D,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,mEAA6C,CAAA;IAC7C,2GAAqF,CAAA;IACrF,2HAAqG,CAAA;IACrG,qFAA+D,CAAA;IAC/D,uGAAiF,CAAA;IACjF,qEAA+C,CAAA;IAC/C,qFAA+D,CAAA;IAC/D,+EAAyD,CAAA;IACzD,yEAAmD,CAAA;IACnD,6FAAuE,CAAA;IACvE,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,uFAAiE,CAAA;IACjE,yFAAmE,CAAA;IACnE,+EAAyD,CAAA;IACzD,qEAA+C,CAAA;IAC/C,qEAA+C,CAAA;IAC/C,2DAAqC,CAAA;IACrC,uEAAiD,CAAA;IACjD,6DAAuC,CAAA;IACvC,6DAAuC,CAAA;IACvC,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,iFAA2D,CAAA;IAC3D,uEAAiD,CAAA;IACjD,iEAA2C,CAAA;IAC3C,+DAAyC,CAAA;IACzC,uEAAiD,CAAA;IACjD,mDAA6B,CAAA;IAC7B,6DAAuC,CAAA;IACvC,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,6DAAuC,CAAA;IACvC,uEAAiD,CAAA;IACjD,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,mHAA6F,CAAA;IAC7F,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,yGAAmF,CAAA;IACnF,+FAAyE,CAAA;IACzE,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,yHAAmG,CAAA;IACnG,+GAAyF,CAAA;IACzF,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,2FAAqE,CAAA;IACrE,iFAA2D,CAAA;IAC3D,qEAA+C,CAAA;IAC/C,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,2DAAqC,CAAA;IACrC,2HAAqG,CAAA;IACrG,iHAA2F,CAAA;IAC3F,iGAA2E,CAAA;IAC3E,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,iEAA2C,CAAA;IAC3C,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,iFAA2D,CAAA;IAC3D,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,2FAAqE,CAAA;IACrE,mFAA6D,CAAA;IAC7D,uGAAiF,CAAA;IACjF,6FAAuE,CAAA;IACvE,yEAAmD,CAAA;IACnD,6FAAuE,CAAA;IACvE,mEAA6C,CAAA;IAC7C,yDAAmC,CAAA;IACnC,mIAA6G,CAAA;IAC7G,iGAA2E,CAAA;IAC3E,uFAAiE,CAAA;IACjE,yGAAmF,CAAA;IACnF,qEAA+C,CAAA;IAC/C,6EAAuD,CAAA;IACvD,yIAAmH,CAAA;IACnH,+HAAyG,CAAA;IACzG,mIAA6G,CAAA;IAC7G,yHA
AmG,CAAA;IACnG,uFAAiE,CAAA;IACjE,uEAAiD,CAAA;IACjD,2GAAqF,CAAA;IACrF,uFAAiE,CAAA;IACjE,mEAA6C,CAAA;IAC7C,qEAA+C,CAAA;IAC/C,+EAAyD,CAAA;IACzD,6FAAuE,CAAA;IACvE,iEAA2C,CAAA;IAC3C,+DAAyC,CAAA;IACzC,mGAA6E,CAAA;IAC7E,yFAAmE,CAAA;IACnE,6DAAuC,CAAA;IACvC,2DAAqC,CAAA;IACrC,6DAAuC,CAAA;IACvC,6HAAuG,CAAA;IACvG,mHAA6F,CAAA;IAC7F,2GAAqF,CAAA;IACrF,qHAA+F,CAAA;IAC/F,2GAAqF,CAAA;IACrF,qFAA+D,CAAA;IAC/D,iEAA2C,CAAA;IAC3C,2DAAqC,CAAA;IACrC,2DAAqC,CAAA;IACrC,uHAAiG,CAAA;IACjG,6GAAuF,CAAA;IACvF,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,qEAA+C,CAAA;IAC/C,iFAA2D,CAAA;IAC3D,+DAAyC,CAAA;IACzC,6EAAuD,CAAA;IACvD,+EAAyD,CAAA;IACzD,mDAA6B,CAAA;IAC7B,+EAAyD,CAAA;IACzD,iEAA2C,CAAA;IAC3C,qFAA+D,CAAA;IAC/D,iGAA2E,CAAA;IAC3E,6GAAuF,CAAA;IACvF,mGAA6E,CAAA;IAC7E,uEAAiD,CAAA;IACjD,yEAAmD,CAAA;IACnD,qEAA+C,CAAA;IAC/C,yEAAmD,CAAA;IACnD,iFAA2D,CAAA;IAC3D,yFAAmE,CAAA;IACnE,yFAAmE,CAAA;IACnE,yFAAmE,CAAA;IACnE,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,+EAAyD,CAAA;IACzD,mFAA6D,CAAA;IAC7D,qFAA+D,CAAA;IAC/D,uGAAiF,CAAA;IACjF,yFAAmE,CAAA;IACnE,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,6EAAuD,CAAA;IACvD,mFAA6D,CAAA;IAC7D,6GAAuF,CAAA;IACvF,qFAA+D,CAAA;IAC/D,yGAAmF,CAAA;IACnF,qFAA+D,CAAA;IAC/D,iFAA2D,CAAA;IAC3D,yFAAmE,CAAA;IACnE,iHAA2F,CAAA;IAC3F,iGAA2E,CAAA;IAC3E,2GAAqF,CAAA;IACrF,iGAA2E,CAAA;IAC3E,iHAA2F,CAAA;IAC3F,+FAAyE,CAAA;IACzE,qFAA+D,CAAA;IAC/D,iGAA2E,CAAA;IAC3E,6DAAuC,CAAA;IACvC,mFAA6D,CAAA;IAC7D,2EAAqD,CAAA;IACrD,2FAAqE,CAAA;IACrE,iFAA2D,CAAA;IAC3D,+EAAyD,CAAA;IACzD,yFAAmE,CAAA;IACnE,yGAAmF,CAAA;IACnF,uGAAiF,CAAA;IACjF,yGAAmF,CAAA;IACnF,iFAA2D,CAAA;IAC3D,yEAAmD,CAAA;IACnD,iFAA2D,CAAA;IAC3D,mFAA6D,CAAA;IAC7D,iGAA2E,CAAA;IAC3E,+GAAyF,CAAA;IACzF,qGAA+E,CAAA;IAC/E,mGAA6E,CAAA;IAC7E,uFAAiE,CAAA;IACjE,6EAAuD,CAAA;IACvD,2FAAqE,CAAA;IACrE,qEAA+C,CAAA;IAC/C,2EAAqD,CAAA;IACrD,mFAA6D,CAAA;IAC7D,yEAAmD,CAAA;IACnD,yGAAmF,CAAA;IACnF,2EAAqD,CAAA;IACrD,uHAAiG,CAAA;IACjG,yGAAmF,CAAA;IACnF,qGAA+E,CAAA;IAC/E,qGAA+E,CAAA;IAC/E,2FAAqE,CAAA;IACrE,qGAA+E,CAAA;IAC/E,yGAAmF,CAAA;IACnF,+FAAyE,CAAA;IACzE,+EAAyD,CAAA;IACzD,iFAA2D,CAAA;IAC3D,2DAAqC,CAAA;IACrC,6EAAuD,CAAA;IACvD,uFAAiE,CAAA;IACjE,+FAAyE,CAAA;IACzE,6FAAuE,CAAA;IACvE,iGAA2E,CAAA;IAC3E,qFAA+D,CAAA;IAC/D,2EAAqD,CAAA;IACrD,qEAA+C,CAAA;IAC/C,2EAAqD,CAAA;IACrD,2FAAqE,CAAA;IACrE,mGAA6E,CAAA;IAC7E,6DAAuC,CAAA;IACvC,iEAA2C,CAAA;IAC3C,iFAA2D,CAAA;IAC3D,+EAAyD,CAAA;IACzD,iFAA2D,CAAA;IAC3D,+EAAyD,CAAA;IACzD,qFAA+D,CAAA;IAC/D,mFAA6D,CAAA;IAC7D,6EAAuD,CAAA;IACvD,+EAAyD,CAAA;IACzD,+FAAyE,CAAA;IACzE,+EAAyD,CAAA;IACzD,6EAAuD,CAAA;IACvD,+DAAyC,CAAA;IACzC,mFAA6D,CAAA;IAC7D,2FAAqE,CAAA;IACrE,qGAA+E,CAAA;IAC/E,iFAA2D,CAAA;IAC3D,2DAAqC,CAAA;IACrC,iFAA2D,CAAA;IAC3D,6EAAuD,CAAA;IACvD,yEAAmD,CAAA;IACnD,mEAA6C,CAAA;IAC7C,yDAAmC,CAAA;AACrC,CAAC,EA5UW,kBAAkB,KAAlB,kBAAkB,QA4U7B"} \ No newline at end of file diff --git a/packages/kbot/dist-in/src/models/cache/openrouter.ts b/packages/kbot/dist-in/src/models/cache/openrouter.ts index ddb7378c..48cb4632 100644 --- a/packages/kbot/dist-in/src/models/cache/openrouter.ts +++ b/packages/kbot/dist-in/src/models/cache/openrouter.ts @@ -1 +1 @@ -export const models = [{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-deepseek-671b","name":"Deep Cogito: Cogito V2 Preview Deepseek 
671B","pricing":{"prompt":"0.00000125","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756830949,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.0000000713","completion":"0.0000002852","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756399192,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 70B","pricing":{"prompt":"0.00000009329544","completion":"0.0000003733632","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 405B","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image-preview","name":"Google: Gemini 2.5 Flash Image Preview","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0"},"created":1756218977,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1:free","name":"DeepSeek: DeepSeek V3.1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":true}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.0000002","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3.1-base","name":"DeepSeek: DeepSeek V3.1 Base","pricing":{"prompt":"0.0000002","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755727017,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","audio":"0.00004","web_search":"0","internal_reasoning":"0"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B 
A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000005","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 1.7","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b:free","name":"OpenAI: gpt-oss-120b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: gpt-oss-120b","pricing":{"prompt":"0.000000072","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000004","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 4.1","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 2508","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B Instruct","pricing":{"prompt":"0.0000000518308","completion":"0.000000207424","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753972379,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.0000000518308","completion":"0.000000207424","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.00000032986602","completion":"0.0000013201056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 Air","pricing":{"prompt":"0.00000014","completion":"0.00000086","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 2507","pricing":{"prompt":"0.000000077968332","completion":"0.00000031202496","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.0000002","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 2507","pricing":{"prompt":"0.000000077968332","completion":"0.00000031202496","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2:free","name":"MoonshotAI: Kimi K2 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2","pricing":{"prompt":"0.00000014","completion":"0.00000249","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":63000,"max_completion_tokens":63000,"is_moderated":false}},{"id":"thudm/glm-4.1v-9b-thinking","name":"THUDM: GLM 4.1V 9B Thinking","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752244385,"top_provider":{"context_length":65536,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral Medium","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752160751,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct:free","name":"Tencent: Hunyuan A13B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910858,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 Fast","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B ","pricing":{"prompt":"0.00000042","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.0000004","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: 
Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000003","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"MoonshotAI: Kimi Dev 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 
Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 (thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.00000001703012","completion":"0.0000000681536","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 
4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small-2505:free","name":"Mistral: Devstral Small 2505 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small-2505","name":"Mistral: Devstral Small 2505","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-8b-instruct:free","name":"Meta: Llama 3.3 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747230154,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.00000009329544","completion":"0.0000003733632","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 
05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B 
A3B","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.000000017992692","completion":"0.00000007200576","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.00000013","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T Chimera","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1","name":"Microsoft: MAI DS 
R1","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b","name":"THUDM: GLM Z1 32B","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-4-32b","name":"THUDM: GLM 4 32B","pricing":{"prompt":"0.00000055","completion":"0.00000166","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"shisa-ai/shisa-v2-llama3.3-70b","name":"Shisa AI: Shisa V2 Llama 3.3 70B ","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 
7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000007","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1","name":"ArliAI: QwQ 32B RpR v1","pricing":{"prompt":"0.00000001","completion":"0.0000000400032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview","name":"Agentica: Deepcoder 14B Preview","pricing":{"prompt":"0.000000015","completion":"0.000000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking:free","name":"MoonshotAI: Kimi VL A3B Thinking (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking","name":"MoonshotAI: Kimi VL A3B Thinking","pricing":{"prompt":"0.00000002498985","completion":"0.000000100008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1","name":"NVIDIA: Llama 3.3 Nemotron Super 49B v1","pricing":{"prompt":"0.00000013","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744119494,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1:free","name":"NVIDIA: Llama 
3.1 Nemotron Ultra 253B v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":1048576,"max_completion_tokens":1048576,"is_moderated":false}},{"id":"google/gemini-2.5-pro-exp-03-25","name":"Google: Gemini 2.5 Pro Experimental","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742922099,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: 
o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":131072,"max_completion_tokens":96000,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.0000000481286","completion":"0.000000192608","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"rekaai/reka-flash-3:free","name":"Reka: Flash 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741812813,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.0000000666396","completion":"0.000000266688","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B V1","pricing":{"prompt":"0.0000005","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.0000000481286","completion":"0.000000192608","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash 
Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b","name":"Dolphin3.0 R1 Mistral 24B","pricing":{"prompt":"0.00000001","completion":"0.0000000340768","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b","name":"Dolphin3.0 Mistral 24B","pricing":{"prompt":"0.000000037022","completion":"0.00000014816","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini 
High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","audio":"0.0000007","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.0000000999594","completion":"0.000000400032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: 
Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000001999188","completion":"0.0000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b:free","name":"DeepSeek: R1 Distill Qwen 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 
3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.0000000259154","completion":"0.000000103712","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000006","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.0000001999188","completion":"0.000000800064","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-2-vision-1212","name":"xAI: Grok 2 Vision 
1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0.0036","web_search":"0","internal_reasoning":"0"},"created":1734237338,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-2-1212","name":"xAI: Grok 2 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734232814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.000000038","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 
2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-vision-beta","name":"xAI: Grok Vision Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.009","web_search":"0","internal_reasoning":"0"},"created":1731976624,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"infermatic/mn-inferor-12b","name":"Infermatic: Mistral Nemo Inferor 12B","pricing":{"prompt":"0.0000006","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731464428,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.0000000499797","completion":"0.000000200016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku (2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 
72B","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini 1.5 Flash 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001","input_cache_write":"0.0000000583"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.00000017","completion":"0.00000043","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 
3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.00000035","completion":"0.0000004","request":"0","image":"0.0005058","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.000000012","completion":"0.000000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.0000000518308","completion":"0.000000207424","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R 
(08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct:free","name":"Meta: Llama 3.1 405B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.000000015","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000001","completion":"0.0000000400032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000001","completion":"0.0000000100008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet 
(2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x22b","name":"Dolphin 2.9.2 Mixtral 8x22B 🐬","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717804800,"top_provider":{"context_length":16000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini 1.5 Flash 
","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001875","input_cache_write":"0.0000001583"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4o","name":"OpenAI: GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini 1.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-plus","name":"Cohere: Command 
R+","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sophosympatheia/midnight-rose-70b","name":"Midnight Rose 70B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1711065600,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command","name":"Cohere: Command","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral 
Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000008","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000004","completion":"0.0000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":512,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"pygmalionai/mythalion-13b","name":"Pygmalion: Mythalion 13B","pricing":{"prompt":"0.0000007","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693612800,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.000001125","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 
13B","pricing":{"prompt":"0.00000045","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000006","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file +export const models = [{"id":"qwen/qwen3-next-80b-a3b-thinking","name":"Qwen: Qwen3 Next 80B A3B Thinking","pricing":{"prompt":"0.00000009782604","completion":"0.000000391304304","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612284,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-next-80b-a3b-instruct","name":"Qwen: Qwen3 Next 80B A3B Instruct","pricing":{"prompt":"0.00000009782604","completion":"0.000000391304304","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757612213,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"meituan/longcat-flash-chat","name":"Meituan: LongCat Flash Chat","pricing":{"prompt":"0.00000015","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757427658,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28","name":"Qwen: Qwen Plus 0728","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-plus-2025-07-28:thinking","name":"Qwen: Qwen Plus 0728 (thinking)","pricing":{"prompt":"0.0000004","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757347599,"top_provider":{"context_length":1000000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2:free","name":"NVIDIA: Nemotron Nano 9B V2 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/nemotron-nano-9b-v2","name":"NVIDIA: Nemotron Nano 9B 
V2","pricing":{"prompt":"0.00000004","completion":"0.00000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757106807,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/sonoma-dusk-alpha","name":"Sonoma Dusk Alpha","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757093247,"top_provider":{"context_length":2000000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/sonoma-sky-alpha","name":"Sonoma Sky Alpha","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757093001,"top_provider":{"context_length":2000000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-max","name":"Qwen: Qwen3 Max","pricing":{"prompt":"0.0000012","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000024"},"created":1757076567,"top_provider":{"context_length":256000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"moonshotai/kimi-k2-0905","name":"MoonshotAI: Kimi K2 0905","pricing":{"prompt":"0.0000003804346","completion":"0.00000152173896","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1757021147,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance/seed-oss-36b-instruct","name":"ByteDance: Seed OSS 36B Instruct","pricing":{"prompt":"0.0000002006688","completion":"0.00000080267549538462","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756834704,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-llama-109b-moe","name":"Cogito V2 Preview Llama 109B","pricing":{"prompt":"0.00000018","completion":"0.00000059","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756831568,"top_provider":{"context_length":32767,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepcogito/cogito-v2-preview-deepseek-671b","name":"Deep Cogito: Cogito V2 Preview Deepseek 671B","pricing":{"prompt":"0.00000125","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756830949,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"stepfun-ai/step3","name":"StepFun: Step3","pricing":{"prompt":"0.00000057","completion":"0.00000142","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756415375,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-thinking-2507","name":"Qwen: Qwen3 30B A3B Thinking 2507","pricing":{"prompt":"0.00000008967387","completion":"0.000000358695612","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756399192,"top_provider":{"context_length":262144,"max_completion_tokens":262144,"is_moderated":false}},{"id":"x-ai/grok-code-fast-1","name":"xAI: Grok Code Fast 1","pricing":{"prompt":"0.0000002","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1756238927,"top_provider":{"context_length":256000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"nousresearch/hermes-4-70b","name":"Nous: Hermes 4 
70B","pricing":{"prompt":"0.000000127173852","completion":"0.0000005086955952","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756236182,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-4-405b","name":"Nous: Hermes 4 405B","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1756235463,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-flash-image-preview","name":"Google: Gemini 2.5 Flash Image Preview","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0"},"created":1756218977,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3.1:free","name":"DeepSeek: DeepSeek V3.1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":true}},{"id":"deepseek/deepseek-chat-v3.1","name":"DeepSeek: DeepSeek V3.1","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755779628,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-v3.1-base","name":"DeepSeek: DeepSeek V3.1 Base","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755727017,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-audio-preview","name":"OpenAI: GPT-4o Audio","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","audio":"0.00004","web_search":"0","internal_reasoning":"0"},"created":1755233061,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-medium-3.1","name":"Mistral: Mistral Medium 3.1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755095639,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"baidu/ernie-4.5-21b-a3b","name":"Baidu: ERNIE 4.5 21B A3B","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755034167,"top_provider":{"context_length":120000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-28b-a3b","name":"Baidu: ERNIE 4.5 VL 28B A3B","pricing":{"prompt":"0.00000014","completion":"0.00000056","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1755032836,"top_provider":{"context_length":30000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"z-ai/glm-4.5v","name":"Z.AI: GLM 4.5V","pricing":{"prompt":"0.0000005","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754922288,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"ai21/jamba-mini-1.7","name":"AI21: Jamba Mini 
1.7","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754670601,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-large-1.7","name":"AI21: Jamba Large 1.7","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754669020,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-5-chat","name":"OpenAI: GPT-5 Chat","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587837,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-5","name":"OpenAI: GPT-5","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000125"},"created":1754587413,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-mini","name":"OpenAI: GPT-5 Mini","pricing":{"prompt":"0.00000025","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1754587407,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-5-nano","name":"OpenAI: GPT-5 Nano","pricing":{"prompt":"0.00000005","completion":"0.0000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.000000005"},"created":1754587402,"top_provider":{"context_length":400000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"openai/gpt-oss-120b:free","name":"OpenAI: gpt-oss-120b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":true}},{"id":"openai/gpt-oss-120b","name":"OpenAI: gpt-oss-120b","pricing":{"prompt":"0.000000072","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414231,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"openai/gpt-oss-20b:free","name":"OpenAI: gpt-oss-20b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-oss-20b","name":"OpenAI: gpt-oss-20b","pricing":{"prompt":"0.00000004","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754414229,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"anthropic/claude-opus-4.1","name":"Anthropic: Claude Opus 4.1","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1754411591,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"mistralai/codestral-2508","name":"Mistral: Codestral 
2508","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1754079630,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder-30b-a3b-instruct","name":"Qwen: Qwen3 Coder 30B A3B Instruct","pricing":{"prompt":"0.00000007065214","completion":"0.000000282608664","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753972379,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b-instruct-2507","name":"Qwen: Qwen3 30B A3B Instruct 2507","pricing":{"prompt":"0.00000007065214","completion":"0.000000282608664","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753806965,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5","name":"Z.AI: GLM 4.5","pricing":{"prompt":"0.000000412499802","completion":"0.0000016499998152","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471347,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air:free","name":"Z.AI: GLM 4.5 Air (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4.5-air","name":"Z.AI: GLM 4.5 Air","pricing":{"prompt":"0.00000014","completion":"0.00000086","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753471258,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-thinking-2507","name":"Qwen: Qwen3 235B A22B Thinking 2507","pricing":{"prompt":"0.0000000974999532","completion":"0.00000038999995632","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753449557,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"z-ai/glm-4-32b","name":"Z.AI: GLM 4 32B ","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753376617,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder:free","name":"Qwen: Qwen3 Coder 480B A35B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-coder","name":"Qwen: Qwen3 Coder 480B A35B","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753230546,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"bytedance/ui-tars-1.5-7b","name":"ByteDance: UI-TARS 7B ","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753205056,"top_provider":{"context_length":128000,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite","name":"Google: Gemini 2.5 Flash 
Lite","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1753200276,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b-2507","name":"Qwen: Qwen3 235B A22B Instruct 2507","pricing":{"prompt":"0.0000000974999532","completion":"0.00000038999995632","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1753119555,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"switchpoint/router","name":"Switchpoint Router","pricing":{"prompt":"0.00000085","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752272899,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2:free","name":"MoonshotAI: Kimi K2 0711 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-k2","name":"MoonshotAI: Kimi K2 0711","pricing":{"prompt":"0.00000014","completion":"0.00000249","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752263252,"top_provider":{"context_length":63000,"max_completion_tokens":63000,"is_moderated":false}},{"id":"thudm/glm-4.1v-9b-thinking","name":"THUDM: GLM 4.1V 9B Thinking","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752244385,"top_provider":{"context_length":65536,"max_completion_tokens":8000,"is_moderated":false}},{"id":"mistralai/devstral-medium","name":"Mistral: Devstral Medium","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752161321,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small 1.1","pricing":{"prompt":"0.00000007","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752160751,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mistral-24b-venice-edition:free","name":"Venice: Uncensored (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752094966,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-4","name":"xAI: Grok 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1752087689,"top_provider":{"context_length":256000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e2b-it:free","name":"Google: Gemma 3n 2B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1752074904,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct:free","name":"Tencent: Hunyuan A13B Instruct 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tencent/hunyuan-a13b-instruct","name":"Tencent: Hunyuan A13B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751987664,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t2-chimera:free","name":"TNG: DeepSeek R1T2 Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751986985,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"morph/morph-v3-large","name":"Morph: Morph V3 Large","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910858,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"morph/morph-v3-fast","name":"Morph: Morph V3 Fast","pricing":{"prompt":"0.0000009","completion":"0.0000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751910002,"top_provider":{"context_length":81920,"max_completion_tokens":38000,"is_moderated":false}},{"id":"baidu/ernie-4.5-vl-424b-a47b","name":"Baidu: ERNIE 4.5 VL 424B A47B ","pricing":{"prompt":"0.00000042","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300903,"top_provider":{"context_length":123000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.0000004","completion":"0.0000007","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax 
M1","pricing":{"prompt":"0.0000003","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"MoonshotAI: Kimi Dev 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b","name":"MoonshotAI: Kimi Dev 72B","pricing":{"prompt":"0.00000029","completion":"0.00000115","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 
2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 (thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.000000013043472","completion":"0.0000000521739072","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":1000000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small-2505:free","name":"Mistral: Devstral Small 2505 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small-2505","name":"Mistral: Devstral Small 
2505","pricing":{"prompt":"0.000000035869548","completion":"0.0000001434782448","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"meta-llama/llama-3.3-8b-instruct:free","name":"Meta: Llama 3.3 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747230154,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"nousresearch/deephermes-3-mistral-24b-preview","name":"Nous: DeepHermes 3 Mistral 24B Preview","pricing":{"prompt":"0.000000127173852","completion":"0.0000005086955952","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746830904,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder 
Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen3-4b:free","name":"Qwen: Qwen3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746031104,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.000000035869548","completion":"0.0000001434782448","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 
14B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.0000000322825932","completion":"0.00000012913042032","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.00000013","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera","name":"TNG: DeepSeek R1T Chimera","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1","name":"Microsoft: MAI DS R1","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b","name":"THUDM: GLM Z1 32B","pricing":{"prompt":"0.000000035869548","completion":"0.0000001434782448","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-4-32b","name":"THUDM: GLM 4 32B","pricing":{"prompt":"0.00000055","completion":"0.00000166","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: 
o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"shisa-ai/shisa-v2-llama3.3-70b","name":"Shisa AI: Shisa V2 Llama 3.3 70B ","pricing":{"prompt":"0.000000035869548","completion":"0.0000001434782448","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1","name":"ArliAI: QwQ 32B RpR 
v1","pricing":{"prompt":"0.000000017934774","completion":"0.0000000717391224","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview","name":"Agentica: Deepcoder 14B Preview","pricing":{"prompt":"0.000000015","completion":"0.000000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking:free","name":"MoonshotAI: Kimi VL A3B Thinking (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking","name":"MoonshotAI: Kimi VL A3B Thinking","pricing":{"prompt":"0.00000006249997","completion":"0.000000249999972","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1:free","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: 
Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":128000,"max_completion_tokens":4028,"is_moderated":true}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":1048576,"max_completion_tokens":1048576,"is_moderated":false}},{"id":"allenai/molmo-7b-d","name":"AllenAI: Molmo 7B D","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743023247,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.000000035869548","completion":"0.0000001434782448","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000003804346","completion":"0.000000152173896","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":131072,"max_completion_tokens":96000,"is_moderated":false}},{"id":"allenai/olmo-2-0325-32b-instruct","name":"AllenAI: Olmo 2 32B Instruct","pricing":{"prompt":"0.000001","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741988556,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000004","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000003532607","completion":"0.000000141304332","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"rekaai/reka-flash-3:free","name":"Reka: Flash 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741812813,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000006521736","completion":"0.000000260869536","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B V1","pricing":{"prompt":"0.0000005","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B 
V2","pricing":{"prompt":"0.000000039130416","completion":"0.0000001565217216","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.00000015","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet 
(thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b","name":"Dolphin3.0 R1 Mistral 24B","pricing":{"prompt":"0.00000001333333333333","completion":"0.0000000347826048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b","name":"Dolphin3.0 Mistral 24B","pricing":{"prompt":"0.0000000271739","completion":"0.00000010869564","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 
Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","audio":"0.0000007","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 
Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000003804346","completion":"0.000000152173896","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.00000027","completion":"0.00000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b:free","name":"DeepSeek: R1 Distill Qwen 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 
70B","pricing":{"prompt":"0.00000003260868","completion":"0.000000130434768","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000006","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.00000024999988","completion":"0.000000999999888","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-2-vision-1212","name":"xAI: Grok 2 Vision 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0.0036","web_search":"0","internal_reasoning":"0"},"created":1734237338,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-2-1212","name":"xAI: Grok 2 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734232814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B 
(12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.000000038","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 
2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000006","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0.01","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku (2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 
8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini 1.5 Flash 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001","input_cache_write":"0.0000000583"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.00000017","completion":"0.00000043","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.00000035","completion":"0.0000004","request":"0","image":"0.0005058","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct:free","name":"Meta: Llama 3.2 3B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B 
Instruct","pricing":{"prompt":"0.000000012","completion":"0.000000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000006521736","completion":"0.000000260869536","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.00000009","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B 
Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.000000015","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct:free","name":"Meta: Llama 3.1 405B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B 
Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.000000017934774","completion":"0.0000000717391224","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini","name":"OpenAI: GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.00000065","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.00000002","completion":"0.0000000358695612","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x22b","name":"Dolphin 2.9.2 Mixtral 8x22B 
🐬","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717804800,"top_provider":{"context_length":16000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini 1.5 Flash ","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001875","input_cache_write":"0.0000001583"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4o","name":"OpenAI: 
GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini 1.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-plus","name":"Cohere: Command R+","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sophosympatheia/midnight-rose-70b","name":"Midnight Rose 
70B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1711065600,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"cohere/command","name":"Cohere: Command","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B 
Instruct","pricing":{"prompt":"0.00000008","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.000001","completion":"0.00000175","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000004","completion":"0.0000055","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":512,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.000001125","completion":"0.000001125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":2000,"is_moderated":false}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.00000045","completion":"0.00000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.00000006","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo","name":"OpenAI: GPT-3.5 Turbo","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older 
v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file diff --git a/packages/kbot/dist/main_node.js b/packages/kbot/dist/main_node.js index 61bf96c1..cd79be95 100644 --- a/packages/kbot/dist/main_node.js +++ b/packages/kbot/dist/main_node.js @@ -169473,8 +169473,21 @@ var E_OPENAI_MODEL; ;// ./dist-in/models/cache/openrouter-models.js var E_OPENROUTER_MODEL; (function (E_OPENROUTER_MODEL) { + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING"] = "qwen/qwen3-next-80b-a3b-thinking"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT"] = "qwen/qwen3-next-80b-a3b-instruct"; + E_OPENROUTER_MODEL["MODEL_MEITUAN_LONGCAT_FLASH_CHAT"] = "meituan/longcat-flash-chat"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS_2025_07_28"] = "qwen/qwen-plus-2025-07-28"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING"] = "qwen/qwen-plus-2025-07-28:thinking"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE"] = "nvidia/nemotron-nano-9b-v2:free"; + E_OPENROUTER_MODEL["MODEL_NVIDIA_NEMOTRON_NANO_9B_V2"] = "nvidia/nemotron-nano-9b-v2"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_SONOMA_DUSK_ALPHA"] = "openrouter/sonoma-dusk-alpha"; + E_OPENROUTER_MODEL["MODEL_OPENROUTER_SONOMA_SKY_ALPHA"] = "openrouter/sonoma-sky-alpha"; + E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_MAX"] = "qwen/qwen3-max"; + E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_KIMI_K2_0905"] = "moonshotai/kimi-k2-0905"; + E_OPENROUTER_MODEL["MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT"] = "bytedance/seed-oss-36b-instruct"; E_OPENROUTER_MODEL["MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE"] = "deepcogito/cogito-v2-preview-llama-109b-moe"; E_OPENROUTER_MODEL["MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B"] = "deepcogito/cogito-v2-preview-deepseek-671b"; + E_OPENROUTER_MODEL["MODEL_STEPFUN_AI_STEP3"] = "stepfun-ai/step3"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507"] = "qwen/qwen3-30b-a3b-thinking-2507"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_CODE_FAST_1"] = "x-ai/grok-code-fast-1"; E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_4_70B"] = "nousresearch/hermes-4-70b"; @@ -169602,14 +169615,13 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING"] = "moonshotai/kimi-vl-a3b-thinking"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_3_MINI_BETA"] = "x-ai/grok-3-mini-beta"; E_OPENROUTER_MODEL["MODEL_X_AI_GROK_3_BETA"] = "x-ai/grok-3-beta"; - E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1"] = "nvidia/llama-3.3-nemotron-super-49b-v1"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_MAVERICK"] = "meta-llama/llama-4-maverick"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_4_SCOUT"] = 
"meta-llama/llama-4-scout"; - E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25"] = "google/gemini-2.5-pro-exp-03-25"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_MOLMO_7B_D"] = "allenai/molmo-7b-d"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT"] = "qwen/qwen2.5-vl-32b-instruct"; E_OPENROUTER_MODEL["MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE"] = "deepseek/deepseek-chat-v3-0324:free"; @@ -169617,6 +169629,7 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_OPENAI_O1_PRO"] = "openai/o1-pro"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT"] = "mistralai/mistral-small-3.1-24b-instruct"; + E_OPENROUTER_MODEL["MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT"] = "allenai/olmo-2-0325-32b-instruct"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_4B_IT"] = "google/gemma-3-4b-it"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free"; @@ -169693,38 +169706,36 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2411"] = "mistralai/mistral-large-2411"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE_2407"] = "mistralai/mistral-large-2407"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_LARGE_2411"] = "mistralai/pixtral-large-2411"; - E_OPENROUTER_MODEL["MODEL_X_AI_GROK_VISION_BETA"] = "x-ai/grok-vision-beta"; - E_OPENROUTER_MODEL["MODEL_INFERMATIC_MN_INFEROR_12B"] = "infermatic/mn-inferor-12b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE"] = "qwen/qwen-2.5-coder-32b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT"] = "qwen/qwen-2.5-coder-32b-instruct"; E_OPENROUTER_MODEL["MODEL_RAIFLE_SORCERERLM_8X22B"] = "raifle/sorcererlm-8x22b"; E_OPENROUTER_MODEL["MODEL_THEDRUMMER_UNSLOPNEMO_12B"] = "thedrummer/unslopnemo-12b"; - E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU"] = "anthropic/claude-3.5-haiku"; + E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022"] = "anthropic/claude-3.5-haiku-20241022"; E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B"] = "anthracite-org/magnum-v4-72b"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_5_SONNET"] = "anthropic/claude-3.5-sonnet"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_3B"] = "mistralai/ministral-3b"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MINISTRAL_8B"] = "mistralai/ministral-8b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_7B_INSTRUCT"] = "qwen/qwen-2.5-7b-instruct"; E_OPENROUTER_MODEL["MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT"] = "nvidia/llama-3.1-nemotron-70b-instruct"; - E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PI"] = "inflection/inflection-3-pi"; + E_OPENROUTER_MODEL["MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY"] = "inflection/inflection-3-productivity"; E_OPENROUTER_MODEL["MODEL_GOOGLE_GEMINI_FLASH_1_5_8B"] = "google/gemini-flash-1.5-8b"; - E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; E_OPENROUTER_MODEL["MODEL_THEDRUMMER_ROCINANTE_12B"] = "thedrummer/rocinante-12b"; - E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = 
"meta-llama/llama-3.2-11b-vision-instruct"; + E_OPENROUTER_MODEL["MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B"] = "anthracite-org/magnum-v2-72b"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-90b-vision-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT"] = "meta-llama/llama-3.2-1b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE"] = "meta-llama/llama-3.2-3b-instruct:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT"] = "meta-llama/llama-3.2-3b-instruct"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT"] = "meta-llama/llama-3.2-11b-vision-instruct"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE"] = "qwen/qwen-2.5-72b-instruct:free"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_72B_INSTRUCT"] = "qwen/qwen-2.5-72b-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B"] = "neversleep/llama-3.1-lumimaid-8b"; - E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI"] = "openai/o1-mini"; + E_OPENROUTER_MODEL["MODEL_OPENAI_O1_MINI_2024_09_12"] = "openai/o1-mini-2024-09-12"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_PIXTRAL_12B"] = "mistralai/pixtral-12b"; - E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_08_2024"] = "cohere/command-r-plus-08-2024"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_08_2024"] = "cohere/command-r-08-2024"; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_1_EURYALE_70B"] = "sao10k/l3.1-euryale-70b"; E_OPENROUTER_MODEL["MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT"] = "qwen/qwen-2.5-vl-7b-instruct"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT"] = "microsoft/phi-3.5-mini-128k-instruct"; @@ -169734,9 +169745,9 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_LUNARIS_8B"] = "sao10k/l3-lunaris-8b"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4O_2024_08_06"] = "openai/gpt-4o-2024-08-06"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B"] = "meta-llama/llama-3.1-405b"; + E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE"] = "meta-llama/llama-3.1-405b-instruct:free"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT"] = "meta-llama/llama-3.1-405b-instruct"; - E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT"] = "meta-llama/llama-3.1-8b-instruct"; E_OPENROUTER_MODEL["MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT"] = "meta-llama/llama-3.1-70b-instruct"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO_FREE"] = "mistralai/mistral-nemo:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_NEMO"] = "mistralai/mistral-nemo"; @@ -169749,9 +169760,9 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_SAO10K_L3_EURYALE_70B"] = "sao10k/l3-euryale-70b"; E_OPENROUTER_MODEL["MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B"] = "cognitivecomputations/dolphin-mixtral-8x22b"; E_OPENROUTER_MODEL["MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B"] = "nousresearch/hermes-2-pro-llama-3-8b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = "mistralai/mistral-7b-instruct-v0.3"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT"] = "mistralai/mistral-7b-instruct"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3"] = 
"mistralai/mistral-7b-instruct-v0.3"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT"] = "microsoft/phi-3-mini-128k-instruct"; E_OPENROUTER_MODEL["MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT"] = "microsoft/phi-3-medium-128k-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B"] = "neversleep/llama-3-lumimaid-70b"; @@ -169769,33 +169780,32 @@ var E_OPENROUTER_MODEL; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS"] = "cohere/command-r-plus"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_PLUS_04_2024"] = "cohere/command-r-plus-04-2024"; E_OPENROUTER_MODEL["MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B"] = "sophosympatheia/midnight-rose-70b"; - E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND"] = "cohere/command"; + E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R"] = "cohere/command-r"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_HAIKU"] = "anthropic/claude-3-haiku"; E_OPENROUTER_MODEL["MODEL_ANTHROPIC_CLAUDE_3_OPUS"] = "anthropic/claude-3-opus"; E_OPENROUTER_MODEL["MODEL_COHERE_COMMAND_R_03_2024"] = "cohere/command-r-03-2024"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_LARGE"] = "mistralai/mistral-large"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_TURBO_PREVIEW"] = "openai/gpt-4-turbo-preview"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_0613"] = "openai/gpt-3.5-turbo-0613"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_TINY"] = "mistralai/mistral-tiny"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_SMALL"] = "mistralai/mistral-small"; E_OPENROUTER_MODEL["MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT"] = "mistralai/mixtral-8x7b-instruct"; E_OPENROUTER_MODEL["MODEL_NEVERSLEEP_NOROMAID_20B"] = "neversleep/noromaid-20b"; E_OPENROUTER_MODEL["MODEL_ALPINDALE_GOLIATH_120B"] = "alpindale/goliath-120b"; E_OPENROUTER_MODEL["MODEL_OPENROUTER_AUTO"] = "openrouter/auto"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_1106_PREVIEW"] = "openai/gpt-4-1106-preview"; - E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT"] = "openai/gpt-3.5-turbo-instruct"; - E_OPENROUTER_MODEL["MODEL_PYGMALIONAI_MYTHALION_13B"] = "pygmalionai/mythalion-13b"; + E_OPENROUTER_MODEL["MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1"] = "mistralai/mistral-7b-instruct-v0.1"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO_16K"] = "openai/gpt-3.5-turbo-16k"; E_OPENROUTER_MODEL["MODEL_MANCER_WEAVER"] = "mancer/weaver"; E_OPENROUTER_MODEL["MODEL_UNDI95_REMM_SLERP_L2_13B"] = "undi95/remm-slerp-l2-13b"; E_OPENROUTER_MODEL["MODEL_GRYPHE_MYTHOMAX_L2_13B"] = "gryphe/mythomax-l2-13b"; - E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_3_5_TURBO"] = "openai/gpt-3.5-turbo"; + E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4_0314"] = "openai/gpt-4-0314"; E_OPENROUTER_MODEL["MODEL_OPENAI_GPT_4"] = "openai/gpt-4"; })(E_OPENROUTER_MODEL || (E_OPENROUTER_MODEL = {})); -//# 
sourceMappingURL=data:application/json;base64,{"version":3,"file":"openrouter-models.js","sourceRoot":"","sources":["../../../src/models/cache/openrouter-models.ts"],"names":[],"mappings":"[previous generated VLQ mappings omitted]"}
+//# sourceMappingURL=data:application/json;base64,{"version":3,"file":"openrouter-models.js","sourceRoot":"","sources":["../../../src/models/cache/openrouter-models.ts"],"names":[],"mappings":"[regenerated VLQ mappings omitted]"}
 ;// ./dist-in/models/cache/openrouter-models-free.js
 var E_OPENROUTER_MODEL_FREE;
 (function (E_OPENROUTER_MODEL_FREE) {
diff --git a/packages/kbot/dist/package-lock.json b/packages/kbot/dist/package-lock.json
index 9a1d87f7..ca9c125c 100644
--- a/packages/kbot/dist/package-lock.json
+++ b/packages/kbot/dist/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@plastichub/kbot",
-  "version": "1.1.50",
+  "version": "1.1.51",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@plastichub/kbot",
-      "version": "1.1.50",
+      "version": "1.1.51",
       "license": "ISC",
       "dependencies": {
         "node-emoji": "^2.2.0"
diff --git a/packages/kbot/dist/package.json b/packages/kbot/dist/package.json
index 24ee066e..1bf65549 100644
--- a/packages/kbot/dist/package.json
+++ b/packages/kbot/dist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@plastichub/kbot",
-  "version": "1.1.50",
"version": "1.1.51", "main": "main_node.js", "author": "", "license": "ISC", diff --git a/packages/kbot/src/models/cache/openrouter-models-free.ts b/packages/kbot/src/models/cache/openrouter-models-free.ts index b1cbeb1a..0858f9c2 100644 --- a/packages/kbot/src/models/cache/openrouter-models-free.ts +++ b/packages/kbot/src/models/cache/openrouter-models-free.ts @@ -1,4 +1,7 @@ export enum E_OPENROUTER_MODEL_FREE { + MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free", + MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha", + MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha", MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free", MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free", MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free", @@ -30,7 +33,6 @@ export enum E_OPENROUTER_MODEL_FREE { MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free", MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free", MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free", - MODEL_FREE_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25", MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", diff --git a/packages/kbot/src/models/cache/openrouter-models.ts b/packages/kbot/src/models/cache/openrouter-models.ts index 80eebaaa..3a5cf94c 100644 --- a/packages/kbot/src/models/cache/openrouter-models.ts +++ b/packages/kbot/src/models/cache/openrouter-models.ts @@ -1,6 +1,19 @@ export enum E_OPENROUTER_MODEL { + MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking", + MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct", + MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat", + MODEL_QWEN_QWEN_PLUS_2025_07_28 = "qwen/qwen-plus-2025-07-28", + MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking", + MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free", + MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2", + MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha", + MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha", + MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max", + MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905", + MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct", MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_LLAMA_109B_MOE = "deepcogito/cogito-v2-preview-llama-109b-moe", MODEL_DEEPCOGITO_COGITO_V2_PREVIEW_DEEPSEEK_671B = "deepcogito/cogito-v2-preview-deepseek-671b", + MODEL_STEPFUN_AI_STEP3 = "stepfun-ai/step3", MODEL_QWEN_QWEN3_30B_A3B_THINKING_2507 = "qwen/qwen3-30b-a3b-thinking-2507", MODEL_X_AI_GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1", MODEL_NOUSRESEARCH_HERMES_4_70B = "nousresearch/hermes-4-70b", @@ -128,14 +141,13 @@ export enum E_OPENROUTER_MODEL { MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking", MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta", MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta", - MODEL_NVIDIA_LLAMA_3_3_NEMOTRON_SUPER_49B_V1 = "nvidia/llama-3.3-nemotron-super-49b-v1", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = 
"nvidia/llama-3.1-nemotron-ultra-253b-v1:free", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1", MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free", MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick", MODEL_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free", MODEL_META_LLAMA_LLAMA_4_SCOUT = "meta-llama/llama-4-scout", - MODEL_GOOGLE_GEMINI_2_5_PRO_EXP_03_25 = "google/gemini-2.5-pro-exp-03-25", + MODEL_ALLENAI_MOLMO_7B_D = "allenai/molmo-7b-d", MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free", MODEL_QWEN_QWEN2_5_VL_32B_INSTRUCT = "qwen/qwen2.5-vl-32b-instruct", MODEL_DEEPSEEK_DEEPSEEK_CHAT_V3_0324_FREE = "deepseek/deepseek-chat-v3-0324:free", @@ -143,6 +155,7 @@ export enum E_OPENROUTER_MODEL { MODEL_OPENAI_O1_PRO = "openai/o1-pro", MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free", MODEL_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT = "mistralai/mistral-small-3.1-24b-instruct", + MODEL_ALLENAI_OLMO_2_0325_32B_INSTRUCT = "allenai/olmo-2-0325-32b-instruct", MODEL_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free", MODEL_GOOGLE_GEMMA_3_4B_IT = "google/gemma-3-4b-it", MODEL_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free", @@ -219,38 +232,36 @@ export enum E_OPENROUTER_MODEL { MODEL_MISTRALAI_MISTRAL_LARGE_2411 = "mistralai/mistral-large-2411", MODEL_MISTRALAI_MISTRAL_LARGE_2407 = "mistralai/mistral-large-2407", MODEL_MISTRALAI_PIXTRAL_LARGE_2411 = "mistralai/pixtral-large-2411", - MODEL_X_AI_GROK_VISION_BETA = "x-ai/grok-vision-beta", - MODEL_INFERMATIC_MN_INFEROR_12B = "infermatic/mn-inferor-12b", MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", MODEL_QWEN_QWEN_2_5_CODER_32B_INSTRUCT = "qwen/qwen-2.5-coder-32b-instruct", MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", - MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", - MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", + MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", - MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", - MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", + MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free", MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = 
"meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", - MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_OPENAI_O1_MINI = "openai/o1-mini", + MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", - MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", + MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", @@ -260,9 +271,9 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT_FREE = "meta-llama/llama-3.1-405b-instruct:free", MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", - MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", @@ -275,9 +286,9 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", @@ -295,29 +306,28 @@ export enum E_OPENROUTER_MODEL { MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024", MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", - MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_COHERE_COMMAND = "cohere/command", + MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", - MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", + MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", 
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", MODEL_OPENROUTER_AUTO = "openrouter/auto", MODEL_OPENAI_GPT_4_1106_PREVIEW = "openai/gpt-4-1106-preview", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", MODEL_OPENAI_GPT_3_5_TURBO_INSTRUCT = "openai/gpt-3.5-turbo-instruct", - MODEL_PYGMALIONAI_MYTHALION_13B = "pygmalionai/mythalion-13b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_1 = "mistralai/mistral-7b-instruct-v0.1", MODEL_OPENAI_GPT_3_5_TURBO_16K = "openai/gpt-3.5-turbo-16k", MODEL_MANCER_WEAVER = "mancer/weaver", MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b", MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b", - MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314", MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo", + MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314", MODEL_OPENAI_GPT_4 = "openai/gpt-4" } \ No newline at end of file
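
For orientation only (not part of the patch): a minimal TypeScript sketch of how the regenerated `E_OPENROUTER_MODEL` and `E_OPENROUTER_MODEL_FREE` enums might be consumed after this cache refresh. The relative import paths and the helper names `isKnownModel` / `pickFreeVariant` are assumptions made for the example, not existing kbot APIs.

```ts
// Hypothetical consumer of the refreshed model caches; import paths assume the
// snippet lives next to packages/kbot/src.
import { E_OPENROUTER_MODEL } from "./models/cache/openrouter-models";
import { E_OPENROUTER_MODEL_FREE } from "./models/cache/openrouter-models-free";

// Check whether a configured model id still exists after the cache update.
const isKnownModel = (id: string): boolean =>
    (Object.values(E_OPENROUTER_MODEL) as string[]).includes(id);

// Prefer a ":free" variant when the free-models cache lists one.
const pickFreeVariant = (id: string): string => {
    const free = `${id}:free`;
    return (Object.values(E_OPENROUTER_MODEL_FREE) as string[]).includes(free) ? free : id;
};

// Newly added ids pass the check; the removed experimental Gemini id no longer does.
console.log(isKnownModel(E_OPENROUTER_MODEL.MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT)); // true
console.log(isKnownModel("google/gemini-2.5-pro-exp-03-25"));                         // false
console.log(pickFreeVariant("nvidia/nemotron-nano-9b-v2"));                           // "nvidia/nemotron-nano-9b-v2:free"
```

A check along these lines would flag callers still pinned to ids dropped in this update (for example `google/gemini-2.5-pro-exp-03-25`) before a request ever reaches OpenRouter.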