From ea52fc9dfe59d3b11c2750afdf023575d043fb22 Mon Sep 17 00:00:00 2001 From: babayaga Date: Fri, 4 Jul 2025 20:14:47 +0200 Subject: [PATCH] maintainence love:) --- packages/kbot/dist-in/data/openai_models.json | 2 +- .../kbot/dist-in/data/openrouter_models.json | 2330 ++++++++--------- .../dist-in/src/models/cache/openrouter.ts | 2 +- packages/kbot/dist/package-lock.json | 4 +- packages/kbot/dist/package.json | 2 +- .../models/cache/openrouter-models-free.ts | 2 - .../src/models/cache/openrouter-models.ts | 62 +- 7 files changed, 1057 insertions(+), 1347 deletions(-) diff --git a/packages/kbot/dist-in/data/openai_models.json b/packages/kbot/dist-in/data/openai_models.json index cee28afa..ed5209c3 100644 --- a/packages/kbot/dist-in/data/openai_models.json +++ b/packages/kbot/dist-in/data/openai_models.json @@ -1,5 +1,5 @@ { - "timestamp": 1751485296151, + "timestamp": 1751652874753, "models": [ { "id": "gpt-4-0613", diff --git a/packages/kbot/dist-in/data/openrouter_models.json b/packages/kbot/dist-in/data/openrouter_models.json index e0ca987c..d8bf0843 100644 --- a/packages/kbot/dist-in/data/openrouter_models.json +++ b/packages/kbot/dist-in/data/openrouter_models.json @@ -1,5 +1,5 @@ { - "timestamp": 1751485296322, + "timestamp": 1751652875009, "models": [ { "id": "openrouter/cypher-alpha:free", @@ -1087,7 +1087,9 @@ "repetition_penalty", "logprobs", "logit_bias", - "top_logprobs" + "top_logprobs", + "response_format", + "top_a" ] }, { @@ -2232,46 +2234,6 @@ "top_p" ] }, - { - "id": "opengvlab/internvl3-2b", - "canonical_slug": "opengvlab/internvl3-2b", - "hugging_face_id": "OpenGVLab/InternVL3-2B", - "name": "OpenGVLab: InternVL3 2B", - "created": 1746019807, - "description": "The 2b version of the InternVL3 series, for an even higher inference speed and very reasonable performance. An advanced multimodal large language model (MLLM) series that demonstrates superior overall performance. Compared to InternVL 2.5, InternVL3 exhibits superior multimodal perception and reasoning capabilities, while further extending its multimodal capabilities to encompass tool usage, GUI agents, industrial image analysis, 3D vision perception, and more.", - "context_length": 12288, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "image", - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000005", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 12288, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p" - ] - }, { "id": "deepseek/deepseek-prover-v2", "canonical_slug": "deepseek/deepseek-prover-v2", @@ -2945,55 +2907,6 @@ "top_logprobs" ] }, - { - "id": "thudm/glm-z1-rumination-32b", - "canonical_slug": "thudm/glm-z1-rumination-32b-0414", - "hugging_face_id": "THUDM/GLM-Z1-Rumination-32B-0414", - "name": "THUDM: GLM Z1 Rumination 32B ", - "created": 1745601495, - "description": "THUDM: GLM Z1 Rumination 32B is a 32B-parameter deep reasoning model from the GLM-4-Z1 series, optimized for complex, open-ended tasks requiring prolonged deliberation. It builds upon glm-4-32b-0414 with additional reinforcement learning phases and multi-stage alignment strategies, introducing “rumination” capabilities designed to emulate extended cognitive processing. 
This includes iterative reasoning, multi-hop analysis, and tool-augmented workflows such as search, retrieval, and citation-aware synthesis.\n\nThe model excels in research-style writing, comparative analysis, and intricate question answering. It supports function calling for search and navigation primitives (`search`, `click`, `open`, `finish`), enabling use in agent-style pipelines. Rumination behavior is governed by multi-turn loops with rule-based reward shaping and delayed decision mechanisms, benchmarked against Deep Research frameworks such as OpenAI’s internal alignment stacks. This variant is suitable for scenarios requiring depth over speed.", - "context_length": 32000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": "deepseek-r1" - }, - "pricing": { - "prompt": "0.00000024", - "completion": "0.00000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32000, - "max_completion_tokens": 32000, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "reasoning", - "include_reasoning", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "top_k", - "min_p", - "repetition_penalty", - "logit_bias" - ] - }, { "id": "microsoft/mai-ds-r1:free", "canonical_slug": "microsoft/mai-ds-r1", @@ -3096,55 +3009,6 @@ "top_logprobs" ] }, - { - "id": "thudm/glm-z1-32b", - "canonical_slug": "thudm/glm-z1-32b-0414", - "hugging_face_id": "THUDM/GLM-Z1-32B-0414", - "name": "THUDM: GLM Z1 32B", - "created": 1744924148, - "description": "GLM-Z1-32B-0414 is an enhanced reasoning variant of GLM-4-32B, built for deep mathematical, logical, and code-oriented problem solving. It applies extended reinforcement learning—both task-specific and general pairwise preference-based—to improve performance on complex multi-step tasks. Compared to the base GLM-4-32B model, Z1 significantly boosts capabilities in structured reasoning and formal domains.\n\nThe model supports enforced “thinking” steps via prompt engineering and offers improved coherence for long-form outputs. It’s optimized for use in agentic workflows, and includes support for long context (via YaRN), JSON tool calling, and fine-grained sampling configuration for stable inference. 
Ideal for use cases requiring deliberate, multi-step reasoning or formal derivations.", - "context_length": 32000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": "deepseek-r1" - }, - "pricing": { - "prompt": "0.00000024", - "completion": "0.00000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32000, - "max_completion_tokens": 32000, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "reasoning", - "include_reasoning", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "top_k", - "min_p", - "repetition_penalty", - "logit_bias" - ] - }, { "id": "thudm/glm-4-32b:free", "canonical_slug": "thudm/glm-4-32b-0414", @@ -6143,6 +6007,52 @@ "top_k" ] }, + { + "id": "anthropic/claude-3.7-sonnet:thinking", + "canonical_slug": "anthropic/claude-3-7-sonnet-20250219", + "hugging_face_id": "", + "name": "Anthropic: Claude 3.7 Sonnet (thinking)", + "created": 1740422110, + "description": "Claude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities. It introduces a hybrid reasoning approach, allowing users to choose between rapid responses and extended, step-by-step processing for complex tasks. The model demonstrates notable improvements in coding, particularly in front-end development and full-stack updates, and excels in agentic workflows, where it can autonomously navigate multi-step processes. \n\nClaude 3.7 Sonnet maintains performance parity with its predecessor in standard mode while offering an extended reasoning mode for enhanced accuracy in math, coding, and instruction-following tasks.\n\nRead more at the [blog post here](https://www.anthropic.com/news/claude-3-7-sonnet)", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000003", + "completion": "0.000015", + "request": "0", + "image": "0.0048", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.0000003", + "input_cache_write": "0.00000375" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 64000, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "stop", + "reasoning", + "include_reasoning", + "tools", + "tool_choice" + ] + }, { "id": "anthropic/claude-3.7-sonnet:beta", "canonical_slug": "anthropic/claude-3-7-sonnet-20250219", @@ -6189,52 +6099,6 @@ "tool_choice" ] }, - { - "id": "anthropic/claude-3.7-sonnet:thinking", - "canonical_slug": "anthropic/claude-3-7-sonnet-20250219", - "hugging_face_id": "", - "name": "Anthropic: Claude 3.7 Sonnet (thinking)", - "created": 1740422110, - "description": "Claude 3.7 Sonnet is an advanced large language model with improved reasoning, coding, and problem-solving capabilities. It introduces a hybrid reasoning approach, allowing users to choose between rapid responses and extended, step-by-step processing for complex tasks. The model demonstrates notable improvements in coding, particularly in front-end development and full-stack updates, and excels in agentic workflows, where it can autonomously navigate multi-step processes. 
\n\nClaude 3.7 Sonnet maintains performance parity with its predecessor in standard mode while offering an extended reasoning mode for enhanced accuracy in math, coding, and instruction-following tasks.\n\nRead more at the [blog post here](https://www.anthropic.com/news/claude-3-7-sonnet)", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000003", - "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.0000003", - "input_cache_write": "0.00000375" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 128000, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "stop", - "reasoning", - "include_reasoning", - "tools", - "tool_choice" - ] - }, { "id": "perplexity/r1-1776", "canonical_slug": "perplexity/r1-1776", @@ -7223,7 +7087,7 @@ }, "pricing": { "prompt": "0.00000005", - "completion": "0.00000009", + "completion": "0.00000008", "request": "0", "image": "0", "web_search": "0", @@ -8416,7 +8280,7 @@ "name": "Meta: Llama 3.3 70B Instruct", "created": 1733506137, "description": "The Meta Llama 3.3 multilingual large language model (LLM) is a pretrained and instruction tuned generative model in 70B (text in/text out). The Llama 3.3 instruction tuned text only model is optimized for multilingual dialogue use cases and outperforms many of the available open source and closed chat models on common industry benchmarks.\n\nSupported languages: English, German, French, Italian, Portuguese, Hindi, Spanish, and Thai.\n\n[Model Card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_3/MODEL_CARD.md)", - "context_length": 131072, + "context_length": 131000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -8429,7 +8293,7 @@ "instruct_type": "llama3" }, "pricing": { - "prompt": "0.000000039", + "prompt": "0.000000038", "completion": "0.00000012", "request": "0", "image": "0", @@ -8437,8 +8301,8 @@ "internal_reasoning": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": 8192, + "context_length": 131000, + "max_completion_tokens": 131000, "is_moderated": false }, "per_request_limits": null, @@ -9218,98 +9082,6 @@ "logprobs" ] }, - { - "id": "anthropic/claude-3.5-haiku-20241022:beta", - "canonical_slug": "anthropic/claude-3-5-haiku-20241022", - "hugging_face_id": null, - "name": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)", - "created": 1730678400, - "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. 
It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.00000008", - "input_cache_write": "0.000001" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "top_k", - "stop" - ] - }, - { - "id": "anthropic/claude-3.5-haiku-20241022", - "canonical_slug": "anthropic/claude-3-5-haiku-20241022", - "hugging_face_id": null, - "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)", - "created": 1730678400, - "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.00000008", - "input_cache_write": "0.000001" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 8192, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "top_k", - "stop" - ] - }, { "id": "anthropic/claude-3.5-haiku:beta", "canonical_slug": "anthropic/claude-3-5-haiku", @@ -9403,35 +9175,38 @@ ] }, { - "id": "anthracite-org/magnum-v4-72b", - "canonical_slug": "anthracite-org/magnum-v4-72b", - "hugging_face_id": "anthracite-org/magnum-v4-72b", - "name": "Magnum v4 72B", - "created": 1729555200, - "description": "This is a series of models designed to replicate the prose quality of the Claude 3 models, specifically Sonnet(https://openrouter.ai/anthropic/claude-3.5-sonnet) and Opus(https://openrouter.ai/anthropic/claude-3-opus).\n\nThe model is fine-tuned on top of [Qwen2.5 72B](https://openrouter.ai/qwen/qwen-2.5-72b-instruct).", - "context_length": 16384, + "id": "anthropic/claude-3.5-haiku-20241022:beta", + "canonical_slug": "anthropic/claude-3-5-haiku-20241022", + "hugging_face_id": null, + "name": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)", + "created": 1730678400, + "description": 
"Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", + "context_length": 200000, "architecture": { - "modality": "text->text", + "modality": "text+image->text", "input_modalities": [ - "text" + "text", + "image" ], "output_modalities": [ "text" ], - "tokenizer": "Qwen", - "instruct_type": "chatml" + "tokenizer": "Claude", + "instruct_type": null }, "pricing": { - "prompt": "0.0000025", - "completion": "0.000003", + "prompt": "0.0000008", + "completion": "0.000004", "request": "0", "image": "0", "web_search": "0", - "internal_reasoning": "0" + "internal_reasoning": "0", + "input_cache_read": "0.00000008", + "input_cache_write": "0.000001" }, "top_provider": { - "context_length": 16384, - "max_completion_tokens": 1024, + "context_length": 200000, + "max_completion_tokens": 8192, "is_moderated": false }, "per_request_limits": null, @@ -9439,15 +9214,56 @@ "max_tokens", "temperature", "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "repetition_penalty", + "tools", + "tool_choice", "top_k", - "min_p", - "seed", - "logit_bias", - "top_a" + "stop" + ] + }, + { + "id": "anthropic/claude-3.5-haiku-20241022", + "canonical_slug": "anthropic/claude-3-5-haiku-20241022", + "hugging_face_id": null, + "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)", + "created": 1730678400, + "description": "Claude 3.5 Haiku features enhancements across all skill sets including coding, tool use, and reasoning. As the fastest model in the Anthropic lineup, it offers rapid response times suitable for applications that require high interactivity and low latency, such as user-facing chatbots and on-the-fly code completions. 
It also excels in specialized tasks like data extraction and real-time content moderation, making it a versatile tool for a broad range of industries.\n\nIt does not support image inputs.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/3-5-models-and-computer-use)", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.00000008", + "input_cache_write": "0.000001" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 8192, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "tools", + "tool_choice", + "top_k", + "stop" ] }, { @@ -9498,6 +9314,54 @@ "top_a" ] }, + { + "id": "anthracite-org/magnum-v4-72b", + "canonical_slug": "anthracite-org/magnum-v4-72b", + "hugging_face_id": "anthracite-org/magnum-v4-72b", + "name": "Magnum v4 72B", + "created": 1729555200, + "description": "This is a series of models designed to replicate the prose quality of the Claude 3 models, specifically Sonnet(https://openrouter.ai/anthropic/claude-3.5-sonnet) and Opus(https://openrouter.ai/anthropic/claude-3-opus).\n\nThe model is fine-tuned on top of [Qwen2.5 72B](https://openrouter.ai/qwen/qwen-2.5-72b-instruct).", + "context_length": 16384, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Qwen", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.0000025", + "completion": "0.000003", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 16384, + "max_completion_tokens": 1024, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "repetition_penalty", + "top_k", + "min_p", + "seed", + "logit_bias", + "top_a" + ] + }, { "id": "anthropic/claude-3.5-sonnet:beta", "canonical_slug": "anthropic/claude-3.5-sonnet", @@ -9591,13 +9455,13 @@ ] }, { - "id": "x-ai/grok-beta", - "canonical_slug": "x-ai/grok-beta", + "id": "mistralai/ministral-8b", + "canonical_slug": "mistralai/ministral-8b", "hugging_face_id": null, - "name": "xAI: Grok Beta", - "created": 1729382400, - "description": "Grok Beta is xAI's experimental language model with state-of-the-art reasoning capabilities, best for complex and multi-step use cases.\n\nIt is the successor of [Grok 2](https://x.ai/blog/grok-2) with enhanced context length.", - "context_length": 131072, + "name": "Mistral: Ministral 8B", + "created": 1729123200, + "description": "Ministral 8B is an 8B parameter model featuring a unique interleaved sliding-window attention pattern for faster, memory-efficient inference. Designed for edge use cases, it supports up to 128k context length and excels in knowledge and reasoning tasks. 
It outperforms peers in the sub-10B category, making it perfect for low-latency, privacy-first applications.", + "context_length": 128000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -9606,19 +9470,19 @@ "output_modalities": [ "text" ], - "tokenizer": "Grok", + "tokenizer": "Mistral", "instruct_type": null }, "pricing": { - "prompt": "0.000005", - "completion": "0.000015", + "prompt": "0.0000001", + "completion": "0.0000001", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 131072, + "context_length": 128000, "max_completion_tokens": null, "is_moderated": false }, @@ -9632,10 +9496,9 @@ "stop", "frequency_penalty", "presence_penalty", - "seed", - "logprobs", - "top_logprobs", - "response_format" + "response_format", + "structured_outputs", + "seed" ] }, { @@ -9685,53 +9548,6 @@ "seed" ] }, - { - "id": "mistralai/ministral-8b", - "canonical_slug": "mistralai/ministral-8b", - "hugging_face_id": null, - "name": "Mistral: Ministral 8B", - "created": 1729123200, - "description": "Ministral 8B is an 8B parameter model featuring a unique interleaved sliding-window attention pattern for faster, memory-efficient inference. Designed for edge use cases, it supports up to 128k context length and excels in knowledge and reasoning tasks. It outperforms peers in the sub-10B category, making it perfect for low-latency, privacy-first applications.", - "context_length": 128000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": null - }, - "pricing": { - "prompt": "0.0000001", - "completion": "0.0000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "stop", - "frequency_penalty", - "presence_penalty", - "response_format", - "structured_outputs", - "seed" - ] - }, { "id": "qwen/qwen-2.5-7b-instruct", "canonical_slug": "qwen/qwen-2.5-7b-instruct", @@ -9962,56 +9778,6 @@ "structured_outputs" ] }, - { - "id": "liquid/lfm-40b", - "canonical_slug": "liquid/lfm-40b", - "hugging_face_id": null, - "name": "Liquid: LFM 40B MoE", - "created": 1727654400, - "description": "Liquid's 40.3B Mixture of Experts (MoE) model. 
Liquid Foundation Models (LFMs) are large neural networks built with computational units rooted in dynamic systems.\n\nLFMs are general-purpose AI models that can be used to model any kind of sequential data, including video, audio, text, time series, and signals.\n\nSee the [launch announcement](https://www.liquid.ai/liquid-foundation-models) for benchmarks and more info.", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Other", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.00000015", - "completion": "0.00000015", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "top_k", - "min_p", - "repetition_penalty", - "logit_bias", - "logprobs", - "top_logprobs", - "response_format" - ] - }, { "id": "thedrummer/rocinante-12b", "canonical_slug": "thedrummer/rocinante-12b", @@ -10108,6 +9874,206 @@ "seed" ] }, + { + "id": "liquid/lfm-40b", + "canonical_slug": "liquid/lfm-40b", + "hugging_face_id": null, + "name": "Liquid: LFM 40B MoE", + "created": 1727654400, + "description": "Liquid's 40.3B Mixture of Experts (MoE) model. Liquid Foundation Models (LFMs) are large neural networks built with computational units rooted in dynamic systems.\n\nLFMs are general-purpose AI models that can be used to model any kind of sequential data, including video, audio, text, time series, and signals.\n\nSee the [launch announcement](https://www.liquid.ai/liquid-foundation-models) for benchmarks and more info.", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Other", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.00000015", + "completion": "0.00000015", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "seed", + "top_k", + "min_p", + "repetition_penalty", + "logit_bias", + "logprobs", + "top_logprobs", + "response_format" + ] + }, + { + "id": "meta-llama/llama-3.2-3b-instruct", + "canonical_slug": "meta-llama/llama-3.2-3b-instruct", + "hugging_face_id": "meta-llama/Llama-3.2-3B-Instruct", + "name": "Meta: Llama 3.2 3B Instruct", + "created": 1727222400, + "description": "Llama 3.2 3B is a 3-billion-parameter multilingual large language model, optimized for advanced natural language processing tasks like dialogue generation, reasoning, and summarization. Designed with the latest transformer architecture, it supports eight languages, including English, Spanish, and Hindi, and is adaptable for additional languages.\n\nTrained on 9 trillion tokens, the Llama 3.2 3B model excels in instruction-following, complex reasoning, and tool use. 
Its balanced performance makes it ideal for applications needing accuracy and efficiency in text generation across multilingual settings.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", + "context_length": 20000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.000000003", + "completion": "0.000000006", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 20000, + "max_completion_tokens": 20000, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "tools", + "tool_choice", + "stop", + "frequency_penalty", + "presence_penalty", + "seed", + "logit_bias", + "logprobs", + "top_logprobs", + "response_format", + "min_p", + "repetition_penalty", + "top_k" + ] + }, + { + "id": "meta-llama/llama-3.2-1b-instruct", + "canonical_slug": "meta-llama/llama-3.2-1b-instruct", + "hugging_face_id": "meta-llama/Llama-3.2-1B-Instruct", + "name": "Meta: Llama 3.2 1B Instruct", + "created": 1727222400, + "description": "Llama 3.2 1B is a 1-billion-parameter language model focused on efficiently performing natural language tasks, such as summarization, dialogue, and multilingual text analysis. Its smaller size allows it to operate efficiently in low-resource environments while maintaining strong task performance.\n\nSupporting eight core languages and fine-tunable for more, Llama 1.3B is ideal for businesses or developers seeking lightweight yet powerful AI solutions that can operate in diverse multilingual settings without the high computational demand of larger models.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.000000005", + "completion": "0.00000001", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 16384, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "repetition_penalty", + "response_format", + "top_k", + "seed", + "min_p", + "logit_bias", + "top_logprobs" + ] + }, + { + "id": "meta-llama/llama-3.2-90b-vision-instruct", + "canonical_slug": "meta-llama/llama-3.2-90b-vision-instruct", + "hugging_face_id": "meta-llama/Llama-3.2-90B-Vision-Instruct", + "name": "Meta: Llama 3.2 90B Vision Instruct", + "created": 1727222400, + "description": "The Llama 90B Vision model is a top-tier, 90-billion-parameter multimodal model designed for the most challenging visual reasoning and language tasks. It offers unparalleled accuracy in image captioning, visual question answering, and advanced image-text comprehension. 
Pre-trained on vast multimodal datasets and fine-tuned with human feedback, the Llama 90B Vision is engineered to handle the most demanding image-based AI tasks.\n\nThis model is perfect for industries requiring cutting-edge multimodal AI capabilities, particularly those dealing with complex, real-time visual and textual analysis.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", + "context_length": 131072, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.0000012", + "completion": "0.0000012", + "request": "0", + "image": "0.001734", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 2048, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "top_k", + "repetition_penalty", + "logit_bias", + "min_p", + "response_format", + "seed" + ] + }, { "id": "meta-llama/llama-3.2-11b-vision-instruct:free", "canonical_slug": "meta-llama/llama-3.2-11b-vision-instruct", @@ -10209,205 +10175,6 @@ "logprobs" ] }, - { - "id": "meta-llama/llama-3.2-1b-instruct:free", - "canonical_slug": "meta-llama/llama-3.2-1b-instruct", - "hugging_face_id": "meta-llama/Llama-3.2-1B-Instruct", - "name": "Meta: Llama 3.2 1B Instruct (free)", - "created": 1727222400, - "description": "Llama 3.2 1B is a 1-billion-parameter language model focused on efficiently performing natural language tasks, such as summarization, dialogue, and multilingual text analysis. 
Its smaller size allows it to operate efficiently in low-resource environments while maintaining strong task performance.\n\nSupporting eight core languages and fine-tunable for more, Llama 1.3B is ideal for businesses or developers seeking lightweight yet powerful AI solutions that can operate in diverse multilingual settings without the high computational demand of larger models.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0", - "completion": "0", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "logprobs", - "top_logprobs", - "min_p", - "repetition_penalty", - "top_k", - "logit_bias" - ] - }, - { - "id": "meta-llama/llama-3.2-1b-instruct", - "canonical_slug": "meta-llama/llama-3.2-1b-instruct", - "hugging_face_id": "meta-llama/Llama-3.2-1B-Instruct", - "name": "Meta: Llama 3.2 1B Instruct", - "created": 1727222400, - "description": "Llama 3.2 1B is a 1-billion-parameter language model focused on efficiently performing natural language tasks, such as summarization, dialogue, and multilingual text analysis. 
Its smaller size allows it to operate efficiently in low-resource environments while maintaining strong task performance.\n\nSupporting eight core languages and fine-tunable for more, Llama 1.3B is ideal for businesses or developers seeking lightweight yet powerful AI solutions that can operate in diverse multilingual settings without the high computational demand of larger models.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.000000005", - "completion": "0.00000001", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": 16384, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "repetition_penalty", - "response_format", - "top_k", - "seed", - "min_p", - "logit_bias", - "top_logprobs" - ] - }, - { - "id": "meta-llama/llama-3.2-90b-vision-instruct", - "canonical_slug": "meta-llama/llama-3.2-90b-vision-instruct", - "hugging_face_id": "meta-llama/Llama-3.2-90B-Vision-Instruct", - "name": "Meta: Llama 3.2 90B Vision Instruct", - "created": 1727222400, - "description": "The Llama 90B Vision model is a top-tier, 90-billion-parameter multimodal model designed for the most challenging visual reasoning and language tasks. It offers unparalleled accuracy in image captioning, visual question answering, and advanced image-text comprehension. 
Pre-trained on vast multimodal datasets and fine-tuned with human feedback, the Llama 90B Vision is engineered to handle the most demanding image-based AI tasks.\n\nThis model is perfect for industries requiring cutting-edge multimodal AI capabilities, particularly those dealing with complex, real-time visual and textual analysis.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD_VISION.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 131072, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.0000012", - "completion": "0.0000012", - "request": "0", - "image": "0.001734", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": 2048, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "top_k", - "repetition_penalty", - "logit_bias", - "min_p", - "response_format", - "seed" - ] - }, - { - "id": "meta-llama/llama-3.2-3b-instruct", - "canonical_slug": "meta-llama/llama-3.2-3b-instruct", - "hugging_face_id": "meta-llama/Llama-3.2-3B-Instruct", - "name": "Meta: Llama 3.2 3B Instruct", - "created": 1727222400, - "description": "Llama 3.2 3B is a 3-billion-parameter multilingual large language model, optimized for advanced natural language processing tasks like dialogue generation, reasoning, and summarization. Designed with the latest transformer architecture, it supports eight languages, including English, Spanish, and Hindi, and is adaptable for additional languages.\n\nTrained on 9 trillion tokens, the Llama 3.2 3B model excels in instruction-following, complex reasoning, and tool use. 
Its balanced performance makes it ideal for applications needing accuracy and efficiency in text generation across multilingual settings.\n\nClick here for the [original model card](https://github.com/meta-llama/llama-models/blob/main/models/llama3_2/MODEL_CARD.md).\n\nUsage of this model is subject to [Meta's Acceptable Use Policy](https://www.llama.com/llama3/use-policy/).", - "context_length": 20000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.000000003", - "completion": "0.000000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 20000, - "max_completion_tokens": 20000, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "logit_bias", - "logprobs", - "top_logprobs", - "response_format", - "min_p", - "repetition_penalty", - "top_k" - ] - }, { "id": "qwen/qwen-2.5-72b-instruct:free", "canonical_slug": "qwen/qwen-2.5-72b-instruct", @@ -10559,10 +10326,10 @@ ] }, { - "id": "openai/o1-mini", - "canonical_slug": "openai/o1-mini", + "id": "openai/o1-preview", + "canonical_slug": "openai/o1-preview", "hugging_face_id": null, - "name": "OpenAI: o1-mini", + "name": "OpenAI: o1-preview", "created": 1726099200, "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", "context_length": 128000, @@ -10578,17 +10345,17 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000011", - "completion": "0.0000044", + "prompt": "0.000015", + "completion": "0.00006", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0", - "input_cache_read": "0.00000055" + "input_cache_read": "0.0000075" }, "top_provider": { "context_length": 128000, - "max_completion_tokens": 65536, + "max_completion_tokens": 32768, "is_moderated": true }, "per_request_limits": null, @@ -10637,10 +10404,10 @@ ] }, { - "id": "openai/o1-mini-2024-09-12", - "canonical_slug": "openai/o1-mini-2024-09-12", + "id": "openai/o1-mini", + "canonical_slug": "openai/o1-mini", "hugging_face_id": null, - "name": "OpenAI: o1-mini (2024-09-12)", + "name": "OpenAI: o1-mini", "created": 1726099200, "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. 
Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", "context_length": 128000, @@ -10676,10 +10443,10 @@ ] }, { - "id": "openai/o1-preview", - "canonical_slug": "openai/o1-preview", + "id": "openai/o1-mini-2024-09-12", + "canonical_slug": "openai/o1-mini-2024-09-12", "hugging_face_id": null, - "name": "OpenAI: o1-preview", + "name": "OpenAI: o1-mini (2024-09-12)", "created": 1726099200, "description": "The latest and strongest model family from OpenAI, o1 is designed to spend more time thinking before responding.\n\nThe o1 models are optimized for math, science, programming, and other STEM-related tasks. They consistently exhibit PhD-level accuracy on benchmarks in physics, chemistry, and biology. Learn more in the [launch announcement](https://openai.com/o1).\n\nNote: This model is currently experimental and not suitable for production use-cases, and may be heavily rate-limited.", "context_length": 128000, @@ -10695,17 +10462,17 @@ "instruct_type": null }, "pricing": { - "prompt": "0.000015", - "completion": "0.00006", + "prompt": "0.0000011", + "completion": "0.0000044", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0", - "input_cache_read": "0.0000075" + "input_cache_read": "0.00000055" }, "top_provider": { "context_length": 128000, - "max_completion_tokens": 32768, + "max_completion_tokens": 65536, "is_moderated": true }, "per_request_limits": null, @@ -10768,53 +10535,6 @@ "structured_outputs" ] }, - { - "id": "cohere/command-r-08-2024", - "canonical_slug": "cohere/command-r-08-2024", - "hugging_face_id": null, - "name": "Cohere: Command R (08-2024)", - "created": 1724976000, - "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", - "context_length": 128000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Cohere", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000015", - "completion": "0.0000006", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 4000, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "stop", - "frequency_penalty", - "presence_penalty", - "top_k", - "seed", - "response_format", - "structured_outputs" - ] - }, { "id": "cohere/command-r-plus-08-2024", "canonical_slug": "cohere/command-r-plus-08-2024", @@ -10863,13 +10583,13 @@ ] }, { - "id": "sao10k/l3.1-euryale-70b", - "canonical_slug": "sao10k/l3.1-euryale-70b", - "hugging_face_id": "Sao10K/L3.1-70B-Euryale-v2.2", - "name": "Sao10K: Llama 3.1 Euryale 70B v2.2", - "created": 1724803200, - "description": "Euryale L3.1 70B v2.2 is a model focused on creative roleplay from [Sao10k](https://ko-fi.com/sao10k). 
It is the successor of [Euryale L3 70B v2.1](/models/sao10k/l3-euryale-70b).", - "context_length": 32768, + "id": "cohere/command-r-08-2024", + "canonical_slug": "cohere/command-r-08-2024", + "hugging_face_id": null, + "name": "Cohere: Command R (08-2024)", + "created": 1724976000, + "description": "command-r-08-2024 is an update of the [Command R](/models/cohere/command-r) with improved performance for multilingual retrieval-augmented generation (RAG) and tool use. More broadly, it is better at math, code and reasoning and is competitive with the previous version of the larger Command R+ model.\n\nRead the launch post [here](https://docs.cohere.com/changelog/command-gets-refreshed).\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "context_length": 128000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -10878,37 +10598,35 @@ "output_modalities": [ "text" ], - "tokenizer": "Llama3", - "instruct_type": "llama3" + "tokenizer": "Cohere", + "instruct_type": null }, "pricing": { - "prompt": "0.00000065", - "completion": "0.00000075", + "prompt": "0.00000015", + "completion": "0.0000006", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 32768, - "max_completion_tokens": null, - "is_moderated": false + "context_length": 128000, + "max_completion_tokens": 4000, + "is_moderated": true }, "per_request_limits": null, "supported_parameters": [ "max_tokens", "temperature", "top_p", + "tools", "stop", "frequency_penalty", "presence_penalty", - "response_format", - "structured_outputs", - "seed", "top_k", - "min_p", - "repetition_penalty", - "logit_bias" + "seed", + "response_format", + "structured_outputs" ] }, { @@ -10961,6 +10679,55 @@ "seed" ] }, + { + "id": "sao10k/l3.1-euryale-70b", + "canonical_slug": "sao10k/l3.1-euryale-70b", + "hugging_face_id": "Sao10K/L3.1-70B-Euryale-v2.2", + "name": "Sao10K: Llama 3.1 Euryale 70B v2.2", + "created": 1724803200, + "description": "Euryale L3.1 70B v2.2 is a model focused on creative roleplay from [Sao10k](https://ko-fi.com/sao10k). It is the successor of [Euryale L3 70B v2.1](/models/sao10k/l3-euryale-70b).", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "llama3" + }, + "pricing": { + "prompt": "0.00000065", + "completion": "0.00000075", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "response_format", + "structured_outputs", + "seed", + "top_k", + "min_p", + "repetition_penalty", + "logit_bias" + ] + }, { "id": "microsoft/phi-3.5-mini-128k-instruct", "canonical_slug": "microsoft/phi-3.5-mini-128k-instruct", @@ -11301,52 +11068,6 @@ "structured_outputs" ] }, - { - "id": "nothingiisreal/mn-celeste-12b", - "canonical_slug": "nothingiisreal/mn-celeste-12b", - "hugging_face_id": "nothingiisreal/MN-12B-Celeste-V1.9", - "name": "Mistral Nemo 12B Celeste", - "created": 1722556800, - "description": "A specialized story writing and roleplaying model based on Mistral's NeMo 12B Instruct. 
Fine-tuned on curated datasets including Reddit Writing Prompts and Opus Instruct 25K.\n\nThis model excels at creative writing, offering improved NSFW capabilities, with smarter and more active narration. It demonstrates remarkable versatility in both SFW and NSFW scenarios, with strong Out of Character (OOC) steering capabilities, allowing fine-tuned control over narrative direction and character behavior.\n\nCheck out the model's [HuggingFace page](https://huggingface.co/nothingiisreal/MN-12B-Celeste-V1.9) for details on what parameters and prompts work best!", - "context_length": 16384, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.0000008", - "completion": "0.0000012", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 16384, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "repetition_penalty", - "top_k", - "min_p", - "seed" - ] - }, { "id": "meta-llama/llama-3.1-405b", "canonical_slug": "meta-llama/llama-3.1-405b", @@ -11396,6 +11117,52 @@ "repetition_penalty" ] }, + { + "id": "nothingiisreal/mn-celeste-12b", + "canonical_slug": "nothingiisreal/mn-celeste-12b", + "hugging_face_id": "nothingiisreal/MN-12B-Celeste-V1.9", + "name": "Mistral Nemo 12B Celeste", + "created": 1722556800, + "description": "A specialized story writing and roleplaying model based on Mistral's NeMo 12B Instruct. Fine-tuned on curated datasets including Reddit Writing Prompts and Opus Instruct 25K.\n\nThis model excels at creative writing, offering improved NSFW capabilities, with smarter and more active narration. 
It demonstrates remarkable versatility in both SFW and NSFW scenarios, with strong Out of Character (OOC) steering capabilities, allowing fine-tuned control over narrative direction and character behavior.\n\nCheck out the model's [HuggingFace page](https://huggingface.co/nothingiisreal/MN-12B-Celeste-V1.9) for details on what parameters and prompts work best!", + "context_length": 16384, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.0000008", + "completion": "0.0000012", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 16384, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "repetition_penalty", + "top_k", + "min_p", + "seed" + ] + }, { "id": "perplexity/llama-3.1-sonar-small-128k-online", "canonical_slug": "perplexity/llama-3.1-sonar-small-128k-online", @@ -11481,13 +11248,13 @@ ] }, { - "id": "meta-llama/llama-3.1-70b-instruct", - "canonical_slug": "meta-llama/llama-3.1-70b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct", - "name": "Meta: Llama 3.1 70B Instruct", + "id": "meta-llama/llama-3.1-8b-instruct", + "canonical_slug": "meta-llama/llama-3.1-8b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", + "name": "Meta: Llama 3.1 8B Instruct", "created": 1721692800, - "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 131072, + "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 131000, "architecture": { "modality": "text->text", "input_modalities": [ @@ -11500,16 +11267,16 @@ "instruct_type": "llama3" }, "pricing": { - "prompt": "0.0000001", - "completion": "0.00000028", + "prompt": "0.000000015", + "completion": "0.00000002", "request": "0", "image": "0", "web_search": "0", "internal_reasoning": "0" }, "top_provider": { - "context_length": 131072, - "max_completion_tokens": 16384, + "context_length": 131000, + "max_completion_tokens": 131000, "is_moderated": false }, "per_request_limits": null, @@ -11519,17 +11286,17 @@ "top_p", "tools", "tool_choice", - "response_format", "stop", "frequency_penalty", "presence_penalty", - "repetition_penalty", - "top_k", "seed", - "min_p", "logit_bias", "logprobs", "top_logprobs", + "response_format", + "min_p", + "repetition_penalty", + "top_k", "structured_outputs" ] }, @@ -11587,12 +11354,12 @@ ] }, { - "id": "meta-llama/llama-3.1-8b-instruct:free", - "canonical_slug": "meta-llama/llama-3.1-8b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", - "name": "Meta: Llama 3.1 8B Instruct (free)", + "id": "meta-llama/llama-3.1-70b-instruct", + "canonical_slug": "meta-llama/llama-3.1-70b-instruct", + "hugging_face_id": "meta-llama/Meta-Llama-3.1-70B-Instruct", + "name": "Meta: Llama 3.1 70B Instruct", "created": 1721692800, - "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. This 70B instruct-tuned version is optimized for high quality dialogue usecases.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", "context_length": 131072, "architecture": { "modality": "text->text", @@ -11606,8 +11373,8 @@ "instruct_type": "llama3" }, "pricing": { - "prompt": "0", - "completion": "0", + "prompt": "0.0000001", + "completion": "0.00000028", "request": "0", "image": "0", "web_search": "0", @@ -11615,56 +11382,7 @@ }, "top_provider": { "context_length": 131072, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "logprobs", - "top_logprobs", - "min_p", - "repetition_penalty", - "top_k", - "logit_bias" - ] - }, - { - "id": "meta-llama/llama-3.1-8b-instruct", - "canonical_slug": "meta-llama/llama-3.1-8b-instruct", - "hugging_face_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", - "name": "Meta: Llama 3.1 8B Instruct", - "created": 1721692800, - "description": "Meta's latest class of model (Llama 3.1) launched with a variety of sizes & flavors. 
This 8B instruct-tuned version is fast and efficient.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3-1/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 131000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "llama3" - }, - "pricing": { - "prompt": "0.000000016", - "completion": "0.000000021", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131000, - "max_completion_tokens": 131000, + "max_completion_tokens": 16384, "is_moderated": false }, "per_request_limits": null, @@ -11674,17 +11392,17 @@ "top_p", "tools", "tool_choice", + "response_format", "stop", "frequency_penalty", "presence_penalty", + "repetition_penalty", + "top_k", "seed", + "min_p", "logit_bias", "logprobs", "top_logprobs", - "response_format", - "min_p", - "repetition_penalty", - "top_k", "structured_outputs" ] }, @@ -11757,7 +11475,7 @@ "instruct_type": "mistral" }, "pricing": { - "prompt": "0.00000001", + "prompt": "0.000000008", "completion": "0.000000001", "request": "0", "image": "0", @@ -11766,7 +11484,7 @@ }, "top_provider": { "context_length": 131072, - "max_completion_tokens": 16384, + "max_completion_tokens": 131072, "is_moderated": false }, "per_request_limits": null, @@ -11790,60 +11508,6 @@ "structured_outputs" ] }, - { - "id": "openai/gpt-4o-mini-2024-07-18", - "canonical_slug": "openai/gpt-4o-mini-2024-07-18", - "hugging_face_id": null, - "name": "OpenAI: GPT-4o-mini (2024-07-18)", - "created": 1721260800, - "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). 
It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal", - "context_length": 128000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image", - "file" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "GPT", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000015", - "completion": "0.0000006", - "request": "0", - "image": "0.007225", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.000000075" - }, - "top_provider": { - "context_length": 128000, - "max_completion_tokens": 16384, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "stop", - "frequency_penalty", - "presence_penalty", - "web_search_options", - "seed", - "logit_bias", - "logprobs", - "top_logprobs", - "response_format", - "structured_outputs" - ] - }, { "id": "openai/gpt-4o-mini", "canonical_slug": "openai/gpt-4o-mini", @@ -11898,6 +11562,60 @@ "tool_choice" ] }, + { + "id": "openai/gpt-4o-mini-2024-07-18", + "canonical_slug": "openai/gpt-4o-mini-2024-07-18", + "hugging_face_id": null, + "name": "OpenAI: GPT-4o-mini (2024-07-18)", + "created": 1721260800, + "description": "GPT-4o mini is OpenAI's newest model after [GPT-4 Omni](/models/openai/gpt-4o), supporting both text and image inputs with text outputs.\n\nAs their most advanced small model, it is many multiples more affordable than other recent frontier models, and more than 60% cheaper than [GPT-3.5 Turbo](/models/openai/gpt-3.5-turbo). 
It maintains SOTA intelligence, while being significantly more cost-effective.\n\nGPT-4o mini achieves an 82% score on MMLU and presently ranks higher than GPT-4 on chat preferences [common leaderboards](https://arena.lmsys.org/).\n\nCheck out the [launch announcement](https://openai.com/index/gpt-4o-mini-advancing-cost-efficient-intelligence/) to learn more.\n\n#multimodal", + "context_length": 128000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image", + "file" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "GPT", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000015", + "completion": "0.0000006", + "request": "0", + "image": "0.007225", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.000000075" + }, + "top_provider": { + "context_length": 128000, + "max_completion_tokens": 16384, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "tools", + "tool_choice", + "stop", + "frequency_penalty", + "presence_penalty", + "web_search_options", + "seed", + "logit_bias", + "logprobs", + "top_logprobs", + "response_format", + "structured_outputs" + ] + }, { "id": "google/gemma-2-27b-it", "canonical_slug": "google/gemma-2-27b-it", @@ -12369,107 +12087,6 @@ "response_format" ] }, - { - "id": "mistralai/mistral-7b-instruct-v0.3", - "canonical_slug": "mistralai/mistral-7b-instruct-v0.3", - "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3", - "name": "Mistral: Mistral 7B Instruct v0.3", - "created": 1716768000, - "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\nAn improved version of [Mistral 7B Instruct v0.2](/models/mistralai/mistral-7b-instruct-v0.2), with the following changes:\n\n- Extended vocabulary to 32768\n- Supports v3 Tokenizer\n- Supports function calling\n\nNOTE: Support for function calling depends on the provider.", - "context_length": 32768, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Mistral", - "instruct_type": "mistral" - }, - "pricing": { - "prompt": "0.000000028", - "completion": "0.000000054", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 32768, - "max_completion_tokens": 16384, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "top_k", - "repetition_penalty", - "logit_bias", - "min_p", - "response_format", - "seed", - "tools", - "tool_choice", - "logprobs" - ] - }, - { - "id": "nousresearch/hermes-2-pro-llama-3-8b", - "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b", - "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B", - "name": "NousResearch: Hermes 2 Pro - Llama-3 8B", - "created": 1716768000, - "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.", - "context_length": 131072, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "chatml" - }, - "pricing": { - "prompt": "0.000000025", - 
"completion": "0.00000004", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 131072, - "max_completion_tokens": 131072, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "seed", - "logit_bias", - "logprobs", - "top_logprobs", - "response_format", - "min_p", - "repetition_penalty", - "top_k" - ] - }, { "id": "mistralai/mistral-7b-instruct:free", "canonical_slug": "mistralai/mistral-7b-instruct", @@ -12570,6 +12187,107 @@ "tool_choice" ] }, + { + "id": "nousresearch/hermes-2-pro-llama-3-8b", + "canonical_slug": "nousresearch/hermes-2-pro-llama-3-8b", + "hugging_face_id": "NousResearch/Hermes-2-Pro-Llama-3-8B", + "name": "NousResearch: Hermes 2 Pro - Llama-3 8B", + "created": 1716768000, + "description": "Hermes 2 Pro is an upgraded, retrained version of Nous Hermes 2, consisting of an updated and cleaned version of the OpenHermes 2.5 Dataset, as well as a newly introduced Function Calling and JSON Mode dataset developed in-house.", + "context_length": 131072, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "chatml" + }, + "pricing": { + "prompt": "0.000000025", + "completion": "0.00000004", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 131072, + "max_completion_tokens": 131072, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "seed", + "logit_bias", + "logprobs", + "top_logprobs", + "response_format", + "min_p", + "repetition_penalty", + "top_k" + ] + }, + { + "id": "mistralai/mistral-7b-instruct-v0.3", + "canonical_slug": "mistralai/mistral-7b-instruct-v0.3", + "hugging_face_id": "mistralai/Mistral-7B-Instruct-v0.3", + "name": "Mistral: Mistral 7B Instruct v0.3", + "created": 1716768000, + "description": "A high-performing, industry-standard 7.3B parameter model, with optimizations for speed and context length.\n\nAn improved version of [Mistral 7B Instruct v0.2](/models/mistralai/mistral-7b-instruct-v0.2), with the following changes:\n\n- Extended vocabulary to 32768\n- Supports v3 Tokenizer\n- Supports function calling\n\nNOTE: Support for function calling depends on the provider.", + "context_length": 32768, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Mistral", + "instruct_type": "mistral" + }, + "pricing": { + "prompt": "0.000000028", + "completion": "0.000000054", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 32768, + "max_completion_tokens": 16384, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "top_k", + "repetition_penalty", + "logit_bias", + "min_p", + "response_format", + "seed", + "tools", + "tool_choice", + "logprobs" + ] + }, { "id": "microsoft/phi-3-mini-128k-instruct", "canonical_slug": "microsoft/phi-3-mini-128k-instruct", @@ -12748,53 +12466,6 @@ "structured_outputs" ] }, - { - "id": 
"meta-llama/llama-guard-2-8b", - "canonical_slug": "meta-llama/llama-guard-2-8b", - "hugging_face_id": "meta-llama/Meta-Llama-Guard-2-8B", - "name": "Meta: LlamaGuard 2 8B", - "created": 1715558400, - "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", - "context_length": 8192, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Llama3", - "instruct_type": "none" - }, - "pricing": { - "prompt": "0.0000002", - "completion": "0.0000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 8192, - "max_completion_tokens": null, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "top_k", - "repetition_penalty", - "logit_bias", - "min_p", - "response_format" - ] - }, { "id": "openai/gpt-4o", "canonical_slug": "openai/gpt-4o", @@ -12902,6 +12573,53 @@ "structured_outputs" ] }, + { + "id": "meta-llama/llama-guard-2-8b", + "canonical_slug": "meta-llama/llama-guard-2-8b", + "hugging_face_id": "meta-llama/Meta-Llama-Guard-2-8B", + "name": "Meta: LlamaGuard 2 8B", + "created": 1715558400, + "description": "This safeguard model has 8B parameters and is based on the Llama 3 family. Just like is predecessor, [LlamaGuard 1](https://huggingface.co/meta-llama/LlamaGuard-7b), it can do both prompt and response classification.\n\nLlamaGuard 2 acts as a normal LLM would, generating text that indicates whether the given input/output is safe/unsafe. If deemed unsafe, it will also share the content categories violated.\n\nFor best results, please use raw prompt input or the `/completions` endpoint, instead of the chat API.\n\nIt has demonstrated strong performance compared to leading closed-source models in human evaluations.\n\nTo read more about the model release, [click here](https://ai.meta.com/blog/meta-llama-3/). 
Usage of this model is subject to [Meta's Acceptable Use Policy](https://llama.meta.com/llama3/use-policy/).", + "context_length": 8192, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Llama3", + "instruct_type": "none" + }, + "pricing": { + "prompt": "0.0000002", + "completion": "0.0000002", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 8192, + "max_completion_tokens": null, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "top_k", + "repetition_penalty", + "logit_bias", + "min_p", + "response_format" + ] + }, { "id": "openai/gpt-4o-2024-05-13", "canonical_slug": "openai/gpt-4o-2024-05-13", @@ -13254,6 +12972,54 @@ "response_format" ] }, + { + "id": "google/gemini-pro-1.5", + "canonical_slug": "google/gemini-pro-1.5", + "hugging_face_id": null, + "name": "Google: Gemini 1.5 Pro", + "created": 1712620800, + "description": "Google's latest multimodal model, supports image and video[0] in text or chat prompts.\n\nOptimized for language tasks including:\n\n- Code generation\n- Text generation\n- Text editing\n- Problem solving\n- Recommendations\n- Information extraction\n- Data extraction or generation\n- AI agents\n\nUsage of Gemini is subject to Google's [Gemini Terms of Use](https://ai.google.dev/terms).\n\n* [0]: Video input is not available through OpenRouter at this time.", + "context_length": 2000000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Gemini", + "instruct_type": null + }, + "pricing": { + "prompt": "0.00000125", + "completion": "0.000005", + "request": "0", + "image": "0.0006575", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 2000000, + "max_completion_tokens": 8192, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "tools", + "tool_choice", + "seed", + "response_format", + "structured_outputs" + ] + }, { "id": "openai/gpt-4-turbo", "canonical_slug": "openai/gpt-4-turbo", @@ -13304,54 +13070,6 @@ "response_format" ] }, - { - "id": "google/gemini-pro-1.5", - "canonical_slug": "google/gemini-pro-1.5", - "hugging_face_id": null, - "name": "Google: Gemini 1.5 Pro", - "created": 1712620800, - "description": "Google's latest multimodal model, supports image and video[0] in text or chat prompts.\n\nOptimized for language tasks including:\n\n- Code generation\n- Text generation\n- Text editing\n- Problem solving\n- Recommendations\n- Information extraction\n- Data extraction or generation\n- AI agents\n\nUsage of Gemini is subject to Google's [Gemini Terms of Use](https://ai.google.dev/terms).\n\n* [0]: Video input is not available through OpenRouter at this time.", - "context_length": 2000000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Gemini", - "instruct_type": null - }, - "pricing": { - "prompt": "0.00000125", - "completion": "0.000005", - "request": "0", - "image": "0.0006575", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - 
"context_length": 2000000, - "max_completion_tokens": 8192, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "tools", - "tool_choice", - "seed", - "response_format", - "structured_outputs" - ] - }, { "id": "cohere/command-r-plus", "canonical_slug": "cohere/command-r-plus", @@ -13493,6 +13211,52 @@ "logit_bias" ] }, + { + "id": "cohere/command", + "canonical_slug": "cohere/command", + "hugging_face_id": null, + "name": "Cohere: Command", + "created": 1710374400, + "description": "Command is an instruction-following conversational model that performs language tasks with high quality, more reliably and with a longer context than our base generative models.\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", + "context_length": 4096, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Cohere", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000001", + "completion": "0.000002", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 4096, + "max_completion_tokens": 4000, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "stop", + "frequency_penalty", + "presence_penalty", + "top_k", + "seed", + "response_format", + "structured_outputs" + ] + }, { "id": "cohere/command-r", "canonical_slug": "cohere/command-r", @@ -13540,52 +13304,6 @@ "structured_outputs" ] }, - { - "id": "cohere/command", - "canonical_slug": "cohere/command", - "hugging_face_id": null, - "name": "Cohere: Command", - "created": 1710374400, - "description": "Command is an instruction-following conversational model that performs language tasks with high quality, more reliably and with a longer context than our base generative models.\n\nUse of this model is subject to Cohere's [Usage Policy](https://docs.cohere.com/docs/usage-policy) and [SaaS Agreement](https://cohere.com/saas-agreement).", - "context_length": 4096, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Cohere", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000001", - "completion": "0.000002", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 4096, - "max_completion_tokens": 4000, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "stop", - "frequency_penalty", - "presence_penalty", - "top_k", - "seed", - "response_format", - "structured_outputs" - ] - }, { "id": "anthropic/claude-3-haiku:beta", "canonical_slug": "anthropic/claude-3-haiku", @@ -13678,98 +13396,6 @@ "stop" ] }, - { - "id": "anthropic/claude-3-sonnet:beta", - "canonical_slug": "anthropic/claude-3-sonnet", - "hugging_face_id": null, - "name": "Anthropic: Claude 3 Sonnet (self-moderated)", - "created": 1709596800, - "description": "Claude 3 Sonnet is an ideal balance of intelligence and speed for enterprise workloads. 
Maximum utility at a lower price, dependable, balanced for scaled deployments.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/claude-3-family)\n\n#multimodal", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000003", - "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.0000003", - "input_cache_write": "0.00000375" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "top_k", - "stop" - ] - }, - { - "id": "anthropic/claude-3-sonnet", - "canonical_slug": "anthropic/claude-3-sonnet", - "hugging_face_id": null, - "name": "Anthropic: Claude 3 Sonnet", - "created": 1709596800, - "description": "Claude 3 Sonnet is an ideal balance of intelligence and speed for enterprise workloads. Maximum utility at a lower price, dependable, balanced for scaled deployments.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/claude-3-family)\n\n#multimodal", - "context_length": 200000, - "architecture": { - "modality": "text+image->text", - "input_modalities": [ - "text", - "image" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000003", - "completion": "0.000015", - "request": "0", - "image": "0.0048", - "web_search": "0", - "internal_reasoning": "0", - "input_cache_read": "0.0000003", - "input_cache_write": "0.00000375" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "tools", - "tool_choice", - "top_k", - "stop" - ] - }, { "id": "anthropic/claude-3-opus:beta", "canonical_slug": "anthropic/claude-3-opus", @@ -13862,6 +13488,98 @@ "stop" ] }, + { + "id": "anthropic/claude-3-sonnet:beta", + "canonical_slug": "anthropic/claude-3-sonnet", + "hugging_face_id": null, + "name": "Anthropic: Claude 3 Sonnet (self-moderated)", + "created": 1709596800, + "description": "Claude 3 Sonnet is an ideal balance of intelligence and speed for enterprise workloads. 
Maximum utility at a lower price, dependable, balanced for scaled deployments.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/claude-3-family)\n\n#multimodal", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000003", + "completion": "0.000015", + "request": "0", + "image": "0.0048", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.0000003", + "input_cache_write": "0.00000375" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "tools", + "tool_choice", + "top_k", + "stop" + ] + }, + { + "id": "anthropic/claude-3-sonnet", + "canonical_slug": "anthropic/claude-3-sonnet", + "hugging_face_id": null, + "name": "Anthropic: Claude 3 Sonnet", + "created": 1709596800, + "description": "Claude 3 Sonnet is an ideal balance of intelligence and speed for enterprise workloads. Maximum utility at a lower price, dependable, balanced for scaled deployments.\n\nSee the launch announcement and benchmark results [here](https://www.anthropic.com/news/claude-3-family)\n\n#multimodal", + "context_length": 200000, + "architecture": { + "modality": "text+image->text", + "input_modalities": [ + "text", + "image" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000003", + "completion": "0.000015", + "request": "0", + "image": "0.0048", + "web_search": "0", + "internal_reasoning": "0", + "input_cache_read": "0.0000003", + "input_cache_write": "0.00000375" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "tools", + "tool_choice", + "top_k", + "stop" + ] + }, { "id": "cohere/command-r-03-2024", "canonical_slug": "cohere/command-r-03-2024", @@ -14104,12 +13822,12 @@ ] }, { - "id": "mistralai/mistral-tiny", - "canonical_slug": "mistralai/mistral-tiny", + "id": "mistralai/mistral-small", + "canonical_slug": "mistralai/mistral-small", "hugging_face_id": null, - "name": "Mistral Tiny", + "name": "Mistral Small", "created": 1704844800, - "description": "Note: This model is being deprecated. Recommended replacement is the newer [Ministral 8B](/mistral/ministral-8b)\n\nThis model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", + "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. 
It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.", "context_length": 32768, "architecture": { "modality": "text->text", @@ -14123,8 +13841,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.00000025", - "completion": "0.00000025", + "prompt": "0.0000002", + "completion": "0.0000006", "request": "0", "image": "0", "web_search": "0", @@ -14151,12 +13869,12 @@ ] }, { - "id": "mistralai/mistral-small", - "canonical_slug": "mistralai/mistral-small", + "id": "mistralai/mistral-tiny", + "canonical_slug": "mistralai/mistral-tiny", "hugging_face_id": null, - "name": "Mistral Small", + "name": "Mistral Tiny", "created": 1704844800, - "description": "With 22 billion parameters, Mistral Small v24.09 offers a convenient mid-point between (Mistral NeMo 12B)[/mistralai/mistral-nemo] and (Mistral Large 2)[/mistralai/mistral-large], providing a cost-effective solution that can be deployed across various platforms and environments. It has better reasoning, exhibits more capabilities, can produce and reason about code, and is multiligual, supporting English, French, German, Italian, and Spanish.", + "description": "Note: This model is being deprecated. Recommended replacement is the newer [Ministral 8B](/mistral/ministral-8b)\n\nThis model is currently powered by Mistral-7B-v0.2, and incorporates a \"better\" fine-tuning than [Mistral 7B](/models/mistralai/mistral-7b-instruct-v0.1), inspired by community work. It's best used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial.", "context_length": 32768, "architecture": { "modality": "text->text", @@ -14170,8 +13888,8 @@ "instruct_type": null }, "pricing": { - "prompt": "0.0000002", - "completion": "0.0000006", + "prompt": "0.00000025", + "completion": "0.00000025", "request": "0", "image": "0", "web_search": "0", @@ -14342,88 +14060,6 @@ "top_a" ] }, - { - "id": "anthropic/claude-2:beta", - "canonical_slug": "anthropic/claude-2", - "hugging_face_id": null, - "name": "Anthropic: Claude v2 (self-moderated)", - "created": 1700611200, - "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - ], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000008", - "completion": "0.000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": false - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "top_k", - "stop" - ] - }, - { - "id": "anthropic/claude-2", - "canonical_slug": "anthropic/claude-2", - "hugging_face_id": null, - "name": "Anthropic: Claude v2", - "created": 1700611200, - "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", - "context_length": 200000, - "architecture": { - "modality": "text->text", - "input_modalities": [ - "text" - 
], - "output_modalities": [ - "text" - ], - "tokenizer": "Claude", - "instruct_type": null - }, - "pricing": { - "prompt": "0.000008", - "completion": "0.000024", - "request": "0", - "image": "0", - "web_search": "0", - "internal_reasoning": "0" - }, - "top_provider": { - "context_length": 200000, - "max_completion_tokens": 4096, - "is_moderated": true - }, - "per_request_limits": null, - "supported_parameters": [ - "max_tokens", - "temperature", - "top_p", - "top_k", - "stop" - ] - }, { "id": "anthropic/claude-2.1:beta", "canonical_slug": "anthropic/claude-2.1", @@ -14506,6 +14142,88 @@ "stop" ] }, + { + "id": "anthropic/claude-2:beta", + "canonical_slug": "anthropic/claude-2", + "hugging_face_id": null, + "name": "Anthropic: Claude v2 (self-moderated)", + "created": 1700611200, + "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", + "context_length": 200000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000008", + "completion": "0.000024", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": false + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "top_k", + "stop" + ] + }, + { + "id": "anthropic/claude-2", + "canonical_slug": "anthropic/claude-2", + "hugging_face_id": null, + "name": "Anthropic: Claude v2", + "created": 1700611200, + "description": "Claude 2 delivers advancements in key capabilities for enterprises—including an industry-leading 200K token context window, significant reductions in rates of model hallucination, system prompts and a new beta feature: tool use.", + "context_length": 200000, + "architecture": { + "modality": "text->text", + "input_modalities": [ + "text" + ], + "output_modalities": [ + "text" + ], + "tokenizer": "Claude", + "instruct_type": null + }, + "pricing": { + "prompt": "0.000008", + "completion": "0.000024", + "request": "0", + "image": "0", + "web_search": "0", + "internal_reasoning": "0" + }, + "top_provider": { + "context_length": 200000, + "max_completion_tokens": 4096, + "is_moderated": true + }, + "per_request_limits": null, + "supported_parameters": [ + "max_tokens", + "temperature", + "top_p", + "top_k", + "stop" + ] + }, { "id": "undi95/toppy-m-7b", "canonical_slug": "undi95/toppy-m-7b", diff --git a/packages/kbot/dist-in/src/models/cache/openrouter.ts b/packages/kbot/dist-in/src/models/cache/openrouter.ts index e4c48f51..057159c5 100644 --- a/packages/kbot/dist-in/src/models/cache/openrouter.ts +++ b/packages/kbot/dist-in/src/models/cache/openrouter.ts @@ -1 +1 @@ -export const models = [{"id":"openrouter/cypher-alpha:free","name":"Cypher Alpha (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751336087,"top_provider":{"context_length":1000000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B 
","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.0000003","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":32000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"morph/morph-v2","name":"Morph: Fast Apply","pricing":{"prompt":"0.0000012","completion":"0.0000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750946108,"top_provider":{"context_length":32000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000003","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"Kimi Dev 72b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 
Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 (thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-7b","name":"DeepSeek: R1 Distill Qwen 7B","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748628237,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.00000001","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000005","completion":"0.00000215","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"sarvamai/sarvam-m:free","name":"Sarvam AI: Sarvam-M (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748188413,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/valkyrie-49b-v1","name":"TheDrummer: Valkyrie 49B V1","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748022670,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small:free","name":"Mistral: Devstral Small (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small","pricing":{"prompt":"0.00000006","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-05-20","name":"Google: Gemini 2.5 Flash Preview 
05-20","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1747761924,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-05-20:thinking","name":"Google: Gemini 2.5 Flash Preview 05-20 (thinking)","pricing":{"prompt":"0.00000015","completion":"0.0000035","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1747761924,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/caller-large","name":"Arcee AI: Caller Large","pricing":{"prompt":"0.00000055","completion":"0.00000085","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746487869,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/virtuoso-medium-v2","name":"Arcee AI: Virtuoso Medium 
V2","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478434,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"arcee-ai/arcee-blitz","name":"Arcee AI: Arcee Blitz","pricing":{"prompt":"0.00000045","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746470100,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"opengvlab/internvl3-14b","name":"OpenGVLab: InternVL3 14B","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746021355,"top_provider":{"context_length":12288,"max_completion_tokens":null,"is_moderated":false}},{"id":"opengvlab/internvl3-2b","name":"OpenGVLab: InternVL3 2B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746019807,"top_provider":{"context_length":12288,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000005","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000008","completion":"0.00000029","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 
8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b:free","name":"Qwen: Qwen3 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.0000001","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B A22B","pricing":{"prompt":"0.00000013","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-rumination-32b","name":"THUDM: GLM Z1 Rumination 32B ","pricing":{"prompt":"0.00000024","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745601495,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b:free","name":"THUDM: GLM Z1 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b","name":"THUDM: GLM Z1 32B","pricing":{"prompt":"0.00000024","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"thudm/glm-4-32b:free","name":"THUDM: GLM 4 32B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-4-32b","name":"THUDM: GLM 4 32B","pricing":{"prompt":"0.00000024","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview","name":"Google: Gemini 2.5 Flash Preview 04-17","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1744914667,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview:thinking","name":"Google: Gemini 2.5 Flash Preview 04-17 (thinking)","pricing":{"prompt":"0.00000015","completion":"0.0000035","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1744914667,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 
Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking:free","name":"Moonshot AI: Kimi VL A3B Thinking (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1:free","name":"NVIDIA: Llama 3.3 Nemotron Super 49B v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744119494,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1","name":"NVIDIA: Llama 3.3 Nemotron Super 49B v1","pricing":{"prompt":"0.00000013","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744119494,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1:free","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":1048576,"max_completion_tokens":1048576,"is_moderated":false}},{"id":"all-hands/openhands-lm-32b-v0.1","name":"OpenHands LM 32B V0.1","pricing":{"prompt":"0.0000026","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743613013,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-v3-base:free","name":"DeepSeek: DeepSeek V3 Base (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743272023,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"scb10x/llama3.1-typhoon2-70b-instruct","name":"Typhoon2 70B Instruct","pricing":{"prompt":"0.00000088","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743196170,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-exp-03-25","name":"Google: Gemini 2.5 Pro Experimental","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742922099,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B 
Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000028","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"featherless/qwerky-72b:free","name":"Qwerky 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742481597,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":96000,"max_completion_tokens":96000,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"ai21/jamba-1.6-large","name":"AI21: Jamba 1.6 Large","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905173,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-1.6-mini","name":"AI21: Jamba Mini 1.6","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905171,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"rekaai/reka-flash-3:free","name":"Reka: Flash 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741812813,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000009","completion":"0.00000017","request":"0","image":"0.0000256","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B V1","pricing":{"prompt":"0.0000008","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning 
Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":true}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.5-preview","name":"OpenAI: GPT-4.5 (Preview)","pricing":{"prompt":"0.000075","completion":"0.00015","request":"0","image":"0.108375","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000375"},"created":1740687810,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:beta","name":"Anthropic: Claude 3.7 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet 
(thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":true}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: 
Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000025","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-qwen-1.5b","name":"DeepSeek: R1 Distill Qwen 1.5B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738328067,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 
3","pricing":{"prompt":"0.00000005","completion":"0.00000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b:free","name":"DeepSeek: R1 Distill Qwen 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: 
R1","pricing":{"prompt":"0.00000045","completion":"0.00000215","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000007","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat:free","name":"DeepSeek: DeepSeek V3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.00000038","completion":"0.00000089","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"eva-unit-01/eva-llama-3.33-70b","name":"EVA Llama 3.33 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734377303,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"x-ai/grok-2-vision-1212","name":"xAI: Grok 2 Vision 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0.0036","web_search":"0","internal_reasoning":"0"},"created":1734237338,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-2-1212","name":"xAI: Grok 2 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734232814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B 
(12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.000000039","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-72b","name":"EVA Qwen2.5 72B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732210606,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 
2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-vision-beta","name":"xAI: Grok Vision Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.009","web_search":"0","internal_reasoning":"0"},"created":1731976624,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"infermatic/mn-inferor-12b","name":"Infermatic: Mistral Nemo Inferor 12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731464428,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000006","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-32b","name":"EVA Qwen2.5 32B","pricing":{"prompt":"0.0000026","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731104847,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku-20241022:beta","name":"Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku 
(2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku:beta","name":"Anthropic: Claude 3.5 Haiku (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.0000025","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":1024,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-70b","name":"NeverSleep: Lumimaid v0.2 70B","pricing":{"prompt":"0.0000025","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet:beta","name":"Anthropic: Claude 3.5 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"x-ai/grok-beta","name":"xAI: Grok Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729382400,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B 
Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini 1.5 Flash 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001","input_cache_write":"0.0000000583"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"liquid/lfm-40b","name":"Liquid: LFM 40B MoE","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.0000002","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct:free","name":"Meta: Llama 3.2 11B Vision Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct:free","name":"Meta: Llama 3.2 1B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 
1B Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.0000012","completion":"0.0000012","request":"0","image":"0.001734","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.000000003","completion":"0.000000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":20000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000012","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.0000002","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-preview-2024-09-12","name":"OpenAI: o1-preview (2024-09-12)","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini (2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-preview","name":"OpenAI: o1-preview","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R 
(08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"aetherwiing/mn-starcannon-12b","name":"Aetherwiing: Starcannon 12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nothingiisreal/mn-celeste-12b","name":"Mistral Nemo 12B 
Celeste","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-small-128k-online","name":"Perplexity: Llama 3.1 Sonar 8B Online","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-large-128k-online","name":"Perplexity: Llama 3.1 Sonar 70B Online","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct:free","name":"Meta: Llama 3.1 8B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.000000016","completion":"0.000000021","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.00000001","completion":"0.000000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini","name":"OpenAI: 
GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"alpindale/magnum-72b","name":"Magnum 72B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720656000,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"01-ai/yi-large","name":"01.AI: Yi Large","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719273600,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620:beta","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20) (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x22b","name":"Dolphin 2.9.2 Mixtral 8x22B 🐬","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717804800,"top_provider":{"context_length":16000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-2-72b-instruct","name":"Qwen 2 72B 
Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717718400,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini 1.5 Flash ","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001875","input_cache_write":"0.0000001583"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: 
GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"neversleep/llama-3-lumimaid-8b","name":"NeverSleep: Llama 3 Lumimaid 8B","pricing":{"prompt":"0.0000002","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1714780800,"top_provider":{"context_length":24576,"max_completion_tokens":2048,"is_moderated":false}},{"id":"sao10k/fimbulvetr-11b-v2","name":"Fimbulvetr 11B v2","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713657600,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini 1.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cohere/command-r-plus","name":"Cohere: Command 
R+","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sophosympatheia/midnight-rose-70b","name":"Midnight Rose 70B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1711065600,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command","name":"Cohere: Command","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":true}},{"id":"anthropic/claude-3-haiku:beta","name":"Anthropic: Claude 3 Haiku (self-moderated)","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-sonnet:beta","name":"Anthropic: Claude 3 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-sonnet","name":"Anthropic: Claude 3 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus:beta","name":"Anthropic: Claude 3 Opus (self-moderated)","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 
Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"nousresearch/nous-hermes-2-mixtral-8x7b-dpo","name":"Nous: Hermes 2 Mixtral 8x7B DPO","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1705363200,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000008","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 20B","pricing":{"prompt":"0.00000125","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-2:beta","name":"Anthropic: Claude v2 
(self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2","name":"Anthropic: Claude v2","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-2.1:beta","name":"Anthropic: Claude v2.1 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.1","name":"Anthropic: Claude v2.1","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"undi95/toppy-m-7b","name":"Toppy M 7B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000009","completion":"0.000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"pygmalionai/mythalion-13b","name":"Pygmalion: Mythalion 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693612800,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver 
(alpha)","pricing":{"prompt":"0.0000015","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":1000,"is_moderated":false}},{"id":"anthropic/claude-2.0:beta","name":"Anthropic: Claude v2.0 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.0","name":"Anthropic: Claude v2.0","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.000000065","completion":"0.000000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file +export const models = [{"id":"openrouter/cypher-alpha:free","name":"Cypher Alpha (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751336087,"top_provider":{"context_length":1000000,"max_completion_tokens":10000,"is_moderated":false}},{"id":"baidu/ernie-4.5-300b-a47b","name":"Baidu: ERNIE 4.5 300B A47B ","pricing":{"prompt":"0.00000028","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751300139,"top_provider":{"context_length":123000,"max_completion_tokens":12000,"is_moderated":false}},{"id":"thedrummer/anubis-70b-v1.1","name":"TheDrummer: Anubis 70B V1.1","pricing":{"prompt":"0.0000003","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1751208347,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"inception/mercury","name":"Inception: Mercury","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750973026,"top_provider":{"context_length":32000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"morph/morph-v2","name":"Morph: Fast 
Apply","pricing":{"prompt":"0.0000012","completion":"0.0000027","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750946108,"top_provider":{"context_length":32000,"max_completion_tokens":16000,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct:free","name":"Mistral: Mistral Small 3.2 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-3.2-24b-instruct","name":"Mistral: Mistral Small 3.2 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750443016,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"minimax/minimax-m1","name":"MiniMax: MiniMax M1","pricing":{"prompt":"0.0000003","completion":"0.00000165","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750200414,"top_provider":{"context_length":1000000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-lite-preview-06-17","name":"Google: Gemini 2.5 Flash Lite Preview 06-17","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750173831,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash","name":"Google: Gemini 2.5 Flash","pricing":{"prompt":"0.0000003","completion":"0.0000025","request":"0","image":"0.001238","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075","input_cache_write":"0.0000003833"},"created":1750172488,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-pro","name":"Google: Gemini 2.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1750169544,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"moonshotai/kimi-dev-72b:free","name":"Kimi Dev 72b (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1750115909,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-pro","name":"OpenAI: o3 Pro","pricing":{"prompt":"0.00002","completion":"0.00008","request":"0","image":"0.0153","web_search":"0","internal_reasoning":"0"},"created":1749598352,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"x-ai/grok-3-mini","name":"xAI: Grok 3 Mini","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1749583245,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3","name":"xAI: Grok 
3","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1749582908,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/magistral-small-2506","name":"Mistral: Magistral Small 2506","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749569561,"top_provider":{"context_length":40000,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506","name":"Mistral: Magistral Medium 2506","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"mistralai/magistral-medium-2506:thinking","name":"Mistral: Magistral Medium 2506 (thinking)","pricing":{"prompt":"0.000002","completion":"0.000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1749354054,"top_provider":{"context_length":40960,"max_completion_tokens":40000,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview","name":"Google: Gemini 2.5 Pro Preview 06-05","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1749137257,"top_provider":{"context_length":1048576,"max_completion_tokens":65536,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-7b","name":"DeepSeek: R1 Distill Qwen 7B","pricing":{"prompt":"0.0000001","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748628237,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b:free","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528-qwen3-8b","name":"DeepSeek: Deepseek R1 0528 Qwen3 8B","pricing":{"prompt":"0.00000001","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748538543,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528:free","name":"DeepSeek: R1 0528 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-0528","name":"DeepSeek: R1 0528","pricing":{"prompt":"0.0000005","completion":"0.00000215","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748455170,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"sarvamai/sarvam-m:free","name":"Sarvam AI: Sarvam-M (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748188413,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thedrummer/valkyrie-49b-v1","name":"TheDrummer: 
Valkyrie 49B V1","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1748022670,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"anthropic/claude-opus-4","name":"Anthropic: Claude Opus 4","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1747931245,"top_provider":{"context_length":200000,"max_completion_tokens":32000,"is_moderated":true}},{"id":"anthropic/claude-sonnet-4","name":"Anthropic: Claude Sonnet 4","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1747930371,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"mistralai/devstral-small:free","name":"Mistral: Devstral Small (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/devstral-small","name":"Mistral: Devstral Small","pricing":{"prompt":"0.00000006","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747837379,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it:free","name":"Google: Gemma 3n 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"google/gemma-3n-e4b-it","name":"Google: Gemma 3n 4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1747776824,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-05-20","name":"Google: Gemini 2.5 Flash Preview 05-20","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1747761924,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview-05-20:thinking","name":"Google: Gemini 2.5 Flash Preview 05-20 (thinking)","pricing":{"prompt":"0.00000015","completion":"0.0000035","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1747761924,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"openai/codex-mini","name":"OpenAI: Codex Mini","pricing":{"prompt":"0.0000015","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000375"},"created":1747409761,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-medium-3","name":"Mistral: Mistral Medium 
3","pricing":{"prompt":"0.0000004","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746627341,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-preview-05-06","name":"Google: Gemini 2.5 Pro Preview 05-06","pricing":{"prompt":"0.00000125","completion":"0.00001","request":"0","image":"0.00516","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000031","input_cache_write":"0.000001625"},"created":1746578513,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"arcee-ai/caller-large","name":"Arcee AI: Caller Large","pricing":{"prompt":"0.00000055","completion":"0.00000085","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746487869,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/spotlight","name":"Arcee AI: Spotlight","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481552,"top_provider":{"context_length":131072,"max_completion_tokens":65537,"is_moderated":false}},{"id":"arcee-ai/maestro-reasoning","name":"Arcee AI: Maestro Reasoning","pricing":{"prompt":"0.0000009","completion":"0.0000033","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746481269,"top_provider":{"context_length":131072,"max_completion_tokens":32000,"is_moderated":false}},{"id":"arcee-ai/virtuoso-large","name":"Arcee AI: Virtuoso Large","pricing":{"prompt":"0.00000075","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478885,"top_provider":{"context_length":131072,"max_completion_tokens":64000,"is_moderated":false}},{"id":"arcee-ai/coder-large","name":"Arcee AI: Coder Large","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478663,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"arcee-ai/virtuoso-medium-v2","name":"Arcee AI: Virtuoso Medium V2","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746478434,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"arcee-ai/arcee-blitz","name":"Arcee AI: Arcee Blitz","pricing":{"prompt":"0.00000045","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746470100,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4-reasoning-plus","name":"Microsoft: Phi 4 Reasoning Plus","pricing":{"prompt":"0.00000007","completion":"0.00000035","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746130961,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"inception/mercury-coder","name":"Inception: Mercury Coder","pricing":{"prompt":"0.00000025","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746033880,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"opengvlab/internvl3-14b","name":"OpenGVLab: InternVL3 
14B","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746021355,"top_provider":{"context_length":12288,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-prover-v2","name":"DeepSeek: DeepSeek Prover V2","pricing":{"prompt":"0.0000005","completion":"0.00000218","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1746013094,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-4-12b","name":"Meta: Llama Guard 4 12B","pricing":{"prompt":"0.00000005","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745975193,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b:free","name":"Qwen: Qwen3 30B A3B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-30b-a3b","name":"Qwen: Qwen3 30B A3B","pricing":{"prompt":"0.00000008","completion":"0.00000029","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745878604,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b:free","name":"Qwen: Qwen3 8B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-8b","name":"Qwen: Qwen3 8B","pricing":{"prompt":"0.000000035","completion":"0.000000138","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876632,"top_provider":{"context_length":128000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"qwen/qwen3-14b:free","name":"Qwen: Qwen3 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-14b","name":"Qwen: Qwen3 14B","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745876478,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"qwen/qwen3-32b:free","name":"Qwen: Qwen3 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-32b","name":"Qwen: Qwen3 32B","pricing":{"prompt":"0.0000001","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875945,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b:free","name":"Qwen: Qwen3 235B A22B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen3-235b-a22b","name":"Qwen: Qwen3 235B 
A22B","pricing":{"prompt":"0.00000013","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745875757,"top_provider":{"context_length":40960,"max_completion_tokens":40960,"is_moderated":false}},{"id":"tngtech/deepseek-r1t-chimera:free","name":"TNG: DeepSeek R1T Chimera (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745760875,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/mai-ds-r1:free","name":"Microsoft: MAI DS R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1745194100,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-z1-32b:free","name":"THUDM: GLM Z1 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744924148,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-4-32b:free","name":"THUDM: GLM 4 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"thudm/glm-4-32b","name":"THUDM: GLM 4 32B","pricing":{"prompt":"0.00000024","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744920915,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview","name":"Google: Gemini 2.5 Flash Preview 04-17","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1744914667,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"google/gemini-2.5-flash-preview:thinking","name":"Google: Gemini 2.5 Flash Preview 04-17 (thinking)","pricing":{"prompt":"0.00000015","completion":"0.0000035","request":"0","image":"0.0006192","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000000375","input_cache_write":"0.0000002333"},"created":1744914667,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"openai/o4-mini-high","name":"OpenAI: o4 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744824212,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o3","name":"OpenAI: o3","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0.00153","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744823457,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"openai/o4-mini","name":"OpenAI: o4 
Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0.0008415","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000275"},"created":1744820942,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"shisa-ai/shisa-v2-llama3.3-70b:free","name":"Shisa AI: Shisa V2 Llama 3.3 70B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744754858,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.1","name":"OpenAI: GPT-4.1","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000005"},"created":1744651385,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-mini","name":"OpenAI: GPT-4.1 Mini","pricing":{"prompt":"0.0000004","completion":"0.0000016","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000001"},"created":1744651381,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/gpt-4.1-nano","name":"OpenAI: GPT-4.1 Nano","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025"},"created":1744651369,"top_provider":{"context_length":1047576,"max_completion_tokens":32768,"is_moderated":true}},{"id":"eleutherai/llemma_7b","name":"EleutherAI: Llemma 7b","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744643225,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alfredpros/codellama-7b-instruct-solidity","name":"AlfredPros: CodeLLaMa 7B Instruct Solidity","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744641874,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"arliai/qwq-32b-arliai-rpr-v1:free","name":"ArliAI: QwQ 32B RpR v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555982,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"agentica-org/deepcoder-14b-preview:free","name":"Agentica: Deepcoder 14B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744555395,"top_provider":{"context_length":96000,"max_completion_tokens":null,"is_moderated":false}},{"id":"moonshotai/kimi-vl-a3b-thinking:free","name":"Moonshot AI: Kimi VL A3B Thinking (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744304841,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-mini-beta","name":"xAI: Grok 3 Mini Beta","pricing":{"prompt":"0.0000003","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1744240195,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-3-beta","name":"xAI: Grok 3 
Beta","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000075"},"created":1744240068,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1:free","name":"NVIDIA: Llama 3.3 Nemotron Super 49B v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744119494,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.3-nemotron-super-49b-v1","name":"NVIDIA: Llama 3.3 Nemotron Super 49B v1","pricing":{"prompt":"0.00000013","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744119494,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1:free","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-ultra-253b-v1","name":"NVIDIA: Llama 3.1 Nemotron Ultra 253B v1","pricing":{"prompt":"0.0000006","completion":"0.0000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1744115059,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick:free","name":"Meta: Llama 4 Maverick (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-maverick","name":"Meta: Llama 4 Maverick","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.0006684","web_search":"0","internal_reasoning":"0"},"created":1743881822,"top_provider":{"context_length":1048576,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-4-scout:free","name":"Meta: Llama 4 Scout (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-4-scout","name":"Meta: Llama 4 Scout","pricing":{"prompt":"0.00000008","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743881519,"top_provider":{"context_length":1048576,"max_completion_tokens":1048576,"is_moderated":false}},{"id":"all-hands/openhands-lm-32b-v0.1","name":"OpenHands LM 32B V0.1","pricing":{"prompt":"0.0000026","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743613013,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-v3-base:free","name":"DeepSeek: DeepSeek V3 Base (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743272023,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"scb10x/llama3.1-typhoon2-70b-instruct","name":"Typhoon2 70B 
Instruct","pricing":{"prompt":"0.00000088","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1743196170,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemini-2.5-pro-exp-03-25","name":"Google: Gemini 2.5 Pro Experimental","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742922099,"top_provider":{"context_length":1048576,"max_completion_tokens":65535,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct:free","name":"Qwen: Qwen2.5 VL 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-32b-instruct","name":"Qwen: Qwen2.5 VL 32B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742839838,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324:free","name":"DeepSeek: DeepSeek V3 0324 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat-v3-0324","name":"DeepSeek: DeepSeek V3 0324","pricing":{"prompt":"0.00000028","completion":"0.00000088","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742824755,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"featherless/qwerky-72b:free","name":"Qwerky 72B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742481597,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/o1-pro","name":"OpenAI: o1-pro","pricing":{"prompt":"0.00015","completion":"0.0006","request":"0","image":"0.21675","web_search":"0","internal_reasoning":"0"},"created":1742423211,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"mistralai/mistral-small-3.1-24b-instruct:free","name":"Mistral: Mistral Small 3.1 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":96000,"max_completion_tokens":96000,"is_moderated":false}},{"id":"mistralai/mistral-small-3.1-24b-instruct","name":"Mistral: Mistral Small 3.1 24B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1742238937,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-4b-it:free","name":"Google: Gemma 3 4B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-4b-it","name":"Google: Gemma 3 
4B","pricing":{"prompt":"0.00000002","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905510,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"ai21/jamba-1.6-large","name":"AI21: Jamba 1.6 Large","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905173,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"ai21/jamba-1.6-mini","name":"AI21: Jamba Mini 1.6","pricing":{"prompt":"0.0000002","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741905171,"top_provider":{"context_length":256000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-3-12b-it:free","name":"Google: Gemma 3 12B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-12b-it","name":"Google: Gemma 3 12B","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741902625,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-a","name":"Cohere: Command A","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741894342,"top_provider":{"context_length":256000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"openai/gpt-4o-mini-search-preview","name":"OpenAI: GPT-4o-mini Search Preview","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0.0275","image":"0.000217","web_search":"0","internal_reasoning":"0"},"created":1741818122,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-search-preview","name":"OpenAI: GPT-4o Search Preview","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0.035","image":"0.003613","web_search":"0","internal_reasoning":"0"},"created":1741817949,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"rekaai/reka-flash-3:free","name":"Reka: Flash 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741812813,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"google/gemma-3-27b-it:free","name":"Google: Gemma 3 27B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":96000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-3-27b-it","name":"Google: Gemma 3 27B","pricing":{"prompt":"0.00000009","completion":"0.00000017","request":"0","image":"0.0000256","web_search":"0","internal_reasoning":"0"},"created":1741756359,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"thedrummer/anubis-pro-105b-v1","name":"TheDrummer: Anubis Pro 105B 
V1","pricing":{"prompt":"0.0000008","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741642290,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"thedrummer/skyfall-36b-v2","name":"TheDrummer: Skyfall 36B V2","pricing":{"prompt":"0.0000005","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741636566,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"microsoft/phi-4-multimodal-instruct","name":"Microsoft: Phi 4 Multimodal Instruct","pricing":{"prompt":"0.00000005","completion":"0.0000001","request":"0","image":"0.00017685","web_search":"0","internal_reasoning":"0"},"created":1741396284,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-reasoning-pro","name":"Perplexity: Sonar Reasoning Pro","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741313308,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar-pro","name":"Perplexity: Sonar Pro","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0"},"created":1741312423,"top_provider":{"context_length":200000,"max_completion_tokens":8000,"is_moderated":false}},{"id":"perplexity/sonar-deep-research","name":"Perplexity: Sonar Deep Research","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0.005","internal_reasoning":"0.000003"},"created":1741311246,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwq-32b:free","name":"Qwen: QwQ 32B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":true}},{"id":"qwen/qwq-32b","name":"Qwen: QwQ 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1741208814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/deephermes-3-llama-3-8b-preview:free","name":"Nous: DeepHermes 3 Llama 3 8B Preview (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740719372,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4.5-preview","name":"OpenAI: GPT-4.5 (Preview)","pricing":{"prompt":"0.000075","completion":"0.00015","request":"0","image":"0.108375","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000375"},"created":1740687810,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemini-2.0-flash-lite-001","name":"Google: Gemini 2.0 Flash Lite","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740506212,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet","name":"Anthropic: Claude 3.7 
Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:thinking","name":"Anthropic: Claude 3.7 Sonnet (thinking)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":64000,"is_moderated":false}},{"id":"anthropic/claude-3.7-sonnet:beta","name":"Anthropic: Claude 3.7 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1740422110,"top_provider":{"context_length":200000,"max_completion_tokens":128000,"is_moderated":false}},{"id":"perplexity/r1-1776","name":"Perplexity: R1 1776","pricing":{"prompt":"0.000002","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1740004929,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-saba","name":"Mistral: Saba","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739803239,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-r1-mistral-24b:free","name":"Dolphin3.0 R1 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462498,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cognitivecomputations/dolphin3.0-mistral-24b:free","name":"Dolphin3.0 Mistral 24B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739462019,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-guard-3-8b","name":"Llama Guard 3 8B","pricing":{"prompt":"0.00000002","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1739401318,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o3-mini-high","name":"OpenAI: o3 Mini High","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1739372611,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-llama-8b","name":"DeepSeek: R1 Distill Llama 8B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738937718,"top_provider":{"context_length":32000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"google/gemini-2.0-flash-001","name":"Google: Gemini 2.0 
Flash","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0.0000258","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000025","input_cache_write":"0.0000001833"},"created":1738769413,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-vl-plus","name":"Qwen: Qwen VL Plus","pricing":{"prompt":"0.00000021","completion":"0.00000063","request":"0","image":"0.0002688","web_search":"0","internal_reasoning":"0"},"created":1738731255,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"aion-labs/aion-1.0","name":"AionLabs: Aion-1.0","pricing":{"prompt":"0.000004","completion":"0.000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697557,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-1.0-mini","name":"AionLabs: Aion-1.0-Mini","pricing":{"prompt":"0.0000007","completion":"0.0000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738697107,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"aion-labs/aion-rp-llama-3.1-8b","name":"AionLabs: Aion-RP 1.0 (8B)","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738696718,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"qwen/qwen-vl-max","name":"Qwen: Qwen VL Max","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.001024","web_search":"0","internal_reasoning":"0"},"created":1738434304,"top_provider":{"context_length":7500,"max_completion_tokens":1500,"is_moderated":false}},{"id":"qwen/qwen-turbo","name":"Qwen: Qwen-Turbo","pricing":{"prompt":"0.00000005","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000002"},"created":1738410974,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct:free","name":"Qwen: Qwen2.5 VL 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"qwen/qwen2.5-vl-72b-instruct","name":"Qwen: Qwen2.5 VL 72B Instruct","pricing":{"prompt":"0.00000025","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738410311,"top_provider":{"context_length":32000,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-plus","name":"Qwen: Qwen-Plus","pricing":{"prompt":"0.0000004","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000016"},"created":1738409840,"top_provider":{"context_length":131072,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-max","name":"Qwen: Qwen-Max ","pricing":{"prompt":"0.0000016","completion":"0.0000064","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000064"},"created":1738402289,"top_provider":{"context_length":32768,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/o3-mini","name":"OpenAI: o3 
Mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1738351721,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"deepseek/deepseek-r1-distill-qwen-1.5b","name":"DeepSeek: R1 Distill Qwen 1.5B","pricing":{"prompt":"0.00000018","completion":"0.00000018","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738328067,"top_provider":{"context_length":131072,"max_completion_tokens":32768,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501:free","name":"Mistral: Mistral Small 3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-small-24b-instruct-2501","name":"Mistral: Mistral Small 3","pricing":{"prompt":"0.00000005","completion":"0.00000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738255409,"top_provider":{"context_length":32768,"max_completion_tokens":32768,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-32b","name":"DeepSeek: R1 Distill Qwen 32B","pricing":{"prompt":"0.000000075","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738194830,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b:free","name":"DeepSeek: R1 Distill Qwen 14B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-qwen-14b","name":"DeepSeek: R1 Distill Qwen 14B","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738193940,"top_provider":{"context_length":64000,"max_completion_tokens":32000,"is_moderated":false}},{"id":"perplexity/sonar-reasoning","name":"Perplexity: Sonar Reasoning","pricing":{"prompt":"0.000001","completion":"0.000005","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738131107,"top_provider":{"context_length":127000,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/sonar","name":"Perplexity: Sonar","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1738013808,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-7b","name":"Liquid: LFM 7B","pricing":{"prompt":"0.00000001","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806883,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-3b","name":"Liquid: LFM 3B","pricing":{"prompt":"0.00000002","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737806501,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b:free","name":"DeepSeek: R1 Distill Llama 70B 
(free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"deepseek/deepseek-r1-distill-llama-70b","name":"DeepSeek: R1 Distill Llama 70B","pricing":{"prompt":"0.0000001","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737663169,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-r1:free","name":"DeepSeek: R1 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-r1","name":"DeepSeek: R1","pricing":{"prompt":"0.00000045","completion":"0.00000215","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1737381095,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":false}},{"id":"minimax/minimax-01","name":"MiniMax: MiniMax-01","pricing":{"prompt":"0.0000002","completion":"0.0000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736915462,"top_provider":{"context_length":1000192,"max_completion_tokens":1000192,"is_moderated":false}},{"id":"mistralai/codestral-2501","name":"Mistral: Codestral 2501","pricing":{"prompt":"0.0000003","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736895522,"top_provider":{"context_length":262144,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-4","name":"Microsoft: Phi 4","pricing":{"prompt":"0.00000007","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1736489872,"top_provider":{"context_length":16384,"max_completion_tokens":16384,"is_moderated":false}},{"id":"deepseek/deepseek-chat:free","name":"DeepSeek: DeepSeek V3 (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":null,"is_moderated":false}},{"id":"deepseek/deepseek-chat","name":"DeepSeek: DeepSeek V3","pricing":{"prompt":"0.00000038","completion":"0.00000089","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1735241320,"top_provider":{"context_length":163840,"max_completion_tokens":163840,"is_moderated":false}},{"id":"sao10k/l3.3-euryale-70b","name":"Sao10K: Llama 3.3 Euryale 70B","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734535928,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/o1","name":"OpenAI: o1","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0.021675","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1734459999,"top_provider":{"context_length":200000,"max_completion_tokens":100000,"is_moderated":true}},{"id":"eva-unit-01/eva-llama-3.33-70b","name":"EVA Llama 3.33 
70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734377303,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"x-ai/grok-2-vision-1212","name":"xAI: Grok 2 Vision 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0.0036","web_search":"0","internal_reasoning":"0"},"created":1734237338,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-2-1212","name":"xAI: Grok 2 1212","pricing":{"prompt":"0.000002","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734232814,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r7b-12-2024","name":"Cohere: Command R7B (12-2024)","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1734158152,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"google/gemini-2.0-flash-exp:free","name":"Google: Gemini 2.0 Flash Experimental (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733937523,"top_provider":{"context_length":1048576,"max_completion_tokens":8192,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct:free","name":"Meta: Llama 3.3 70B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.3-70b-instruct","name":"Meta: Llama 3.3 70B Instruct","pricing":{"prompt":"0.000000038","completion":"0.00000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733506137,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"amazon/nova-lite-v1","name":"Amazon: Nova Lite 1.0","pricing":{"prompt":"0.00000006","completion":"0.00000024","request":"0","image":"0.00009","web_search":"0","internal_reasoning":"0"},"created":1733437363,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-micro-v1","name":"Amazon: Nova Micro 1.0","pricing":{"prompt":"0.000000035","completion":"0.00000014","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1733437237,"top_provider":{"context_length":128000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"amazon/nova-pro-v1","name":"Amazon: Nova Pro 1.0","pricing":{"prompt":"0.0000008","completion":"0.0000032","request":"0","image":"0.0012","web_search":"0","internal_reasoning":"0"},"created":1733436303,"top_provider":{"context_length":300000,"max_completion_tokens":5120,"is_moderated":true}},{"id":"qwen/qwq-32b-preview","name":"Qwen: QwQ 32B Preview","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732754541,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-72b","name":"EVA Qwen2.5 
72B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1732210606,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4o-2024-11-20","name":"OpenAI: GPT-4o (2024-11-20)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1732127594,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"mistralai/mistral-large-2411","name":"Mistral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978685,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-large-2407","name":"Mistral Large 2407","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731978415,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/pixtral-large-2411","name":"Mistral: Pixtral Large 2411","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0.002888","web_search":"0","internal_reasoning":"0"},"created":1731977388,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"x-ai/grok-vision-beta","name":"xAI: Grok Vision Beta","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.009","web_search":"0","internal_reasoning":"0"},"created":1731976624,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"infermatic/mn-inferor-12b","name":"Infermatic: Mistral Nemo Inferor 12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731464428,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct:free","name":"Qwen2.5 Coder 32B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-coder-32b-instruct","name":"Qwen2.5 Coder 32B Instruct","pricing":{"prompt":"0.00000006","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731368400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"raifle/sorcererlm-8x22b","name":"SorcererLM 8x22B","pricing":{"prompt":"0.0000045","completion":"0.0000045","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731105083,"top_provider":{"context_length":16000,"max_completion_tokens":null,"is_moderated":false}},{"id":"eva-unit-01/eva-qwen-2.5-32b","name":"EVA Qwen2.5 32B","pricing":{"prompt":"0.0000026","completion":"0.0000034","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731104847,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"thedrummer/unslopnemo-12b","name":"TheDrummer: UnslopNemo 
12B","pricing":{"prompt":"0.0000004","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1731103448,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku:beta","name":"Anthropic: Claude 3.5 Haiku (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku","name":"Anthropic: Claude 3.5 Haiku","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"anthropic/claude-3.5-haiku-20241022:beta","name":"Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-haiku-20241022","name":"Anthropic: Claude 3.5 Haiku (2024-10-22)","pricing":{"prompt":"0.0000008","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000008","input_cache_write":"0.000001"},"created":1730678400,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"neversleep/llama-3.1-lumimaid-70b","name":"NeverSleep: Lumimaid v0.2 70B","pricing":{"prompt":"0.0000025","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthracite-org/magnum-v4-72b","name":"Magnum v4 72B","pricing":{"prompt":"0.0000025","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729555200,"top_provider":{"context_length":16384,"max_completion_tokens":1024,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet:beta","name":"Anthropic: Claude 3.5 Sonnet (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet","name":"Anthropic: Claude 3.5 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1729555200,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"mistralai/ministral-8b","name":"Mistral: Ministral 
8B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/ministral-3b","name":"Mistral: Ministral 3B","pricing":{"prompt":"0.00000004","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729123200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-7b-instruct","name":"Qwen2.5 7B Instruct","pricing":{"prompt":"0.00000004","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1729036800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nvidia/llama-3.1-nemotron-70b-instruct","name":"NVIDIA: Llama 3.1 Nemotron 70B Instruct","pricing":{"prompt":"0.00000012","completion":"0.0000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728950400,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"inflection/inflection-3-productivity","name":"Inflection: Inflection 3 Productivity","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"inflection/inflection-3-pi","name":"Inflection: Inflection 3 Pi","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1728604800,"top_provider":{"context_length":8000,"max_completion_tokens":1024,"is_moderated":false}},{"id":"google/gemini-flash-1.5-8b","name":"Google: Gemini 1.5 Flash 8B","pricing":{"prompt":"0.0000000375","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001","input_cache_write":"0.0000000583"},"created":1727913600,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"thedrummer/rocinante-12b","name":"TheDrummer: Rocinante 12B","pricing":{"prompt":"0.0000002","completion":"0.0000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"anthracite-org/magnum-v2-72b","name":"Magnum v2 72B","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"liquid/lfm-40b","name":"Liquid: LFM 40B MoE","pricing":{"prompt":"0.00000015","completion":"0.00000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727654400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.2-3b-instruct","name":"Meta: Llama 3.2 3B Instruct","pricing":{"prompt":"0.000000003","completion":"0.000000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":20000,"max_completion_tokens":20000,"is_moderated":false}},{"id":"meta-llama/llama-3.2-1b-instruct","name":"Meta: Llama 3.2 1B 
Instruct","pricing":{"prompt":"0.000000005","completion":"0.00000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.2-90b-vision-instruct","name":"Meta: Llama 3.2 90B Vision Instruct","pricing":{"prompt":"0.0000012","completion":"0.0000012","request":"0","image":"0.001734","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct:free","name":"Meta: Llama 3.2 11B Vision Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":2048,"is_moderated":false}},{"id":"meta-llama/llama-3.2-11b-vision-instruct","name":"Meta: Llama 3.2 11B Vision Instruct","pricing":{"prompt":"0.000000049","completion":"0.000000049","request":"0","image":"0.00007948","web_search":"0","internal_reasoning":"0"},"created":1727222400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct:free","name":"Qwen2.5 72B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"qwen/qwen-2.5-72b-instruct","name":"Qwen2.5 72B Instruct","pricing":{"prompt":"0.00000012","completion":"0.00000039","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726704000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/llama-3.1-lumimaid-8b","name":"NeverSleep: Lumimaid v0.2 8B","pricing":{"prompt":"0.0000002","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1726358400,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"openai/o1-preview","name":"OpenAI: o1-preview","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/o1-preview-2024-09-12","name":"OpenAI: o1-preview (2024-09-12)","pricing":{"prompt":"0.000015","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000075"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":32768,"is_moderated":true}},{"id":"openai/o1-mini","name":"OpenAI: o1-mini","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"openai/o1-mini-2024-09-12","name":"OpenAI: o1-mini 
(2024-09-12)","pricing":{"prompt":"0.0000011","completion":"0.0000044","request":"0","image":"0","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000055"},"created":1726099200,"top_provider":{"context_length":128000,"max_completion_tokens":65536,"is_moderated":true}},{"id":"mistralai/pixtral-12b","name":"Mistral: Pixtral 12B","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1725926400,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"cohere/command-r-plus-08-2024","name":"Cohere: Command R+ (08-2024)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-08-2024","name":"Cohere: Command R (08-2024)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724976000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"qwen/qwen-2.5-vl-7b-instruct","name":"Qwen: Qwen2.5-VL 7B Instruct","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0.0001445","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"sao10k/l3.1-euryale-70b","name":"Sao10K: Llama 3.1 Euryale 70B v2.2","pricing":{"prompt":"0.00000065","completion":"0.00000075","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724803200,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3.5-mini-128k-instruct","name":"Microsoft: Phi-3.5 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1724198400,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-70b","name":"Nous: Hermes 3 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723939200,"top_provider":{"context_length":131072,"max_completion_tokens":null,"is_moderated":false}},{"id":"nousresearch/hermes-3-llama-3.1-405b","name":"Nous: Hermes 3 405B Instruct","pricing":{"prompt":"0.0000007","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723766400,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"openai/chatgpt-4o-latest","name":"OpenAI: ChatGPT-4o","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1723593600,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"sao10k/l3-lunaris-8b","name":"Sao10K: Llama 3 8B Lunaris","pricing":{"prompt":"0.00000002","completion":"0.00000005","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"aetherwiing/mn-starcannon-12b","name":"Aetherwiing: Starcannon 
12B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1723507200,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4o-2024-08-06","name":"OpenAI: GPT-4o (2024-08-06)","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1722902400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b","name":"Meta: Llama 3.1 405B (base)","pricing":{"prompt":"0.000002","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"nothingiisreal/mn-celeste-12b","name":"Mistral Nemo 12B Celeste","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722556800,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-small-128k-online","name":"Perplexity: Llama 3.1 Sonar 8B Online","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"perplexity/llama-3.1-sonar-large-128k-online","name":"Perplexity: Llama 3.1 Sonar 70B Online","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0.005","image":"0","web_search":"0","internal_reasoning":"0"},"created":1722470400,"top_provider":{"context_length":127072,"max_completion_tokens":null,"is_moderated":false}},{"id":"meta-llama/llama-3.1-8b-instruct","name":"Meta: Llama 3.1 8B Instruct","pricing":{"prompt":"0.000000015","completion":"0.00000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131000,"max_completion_tokens":131000,"is_moderated":false}},{"id":"meta-llama/llama-3.1-405b-instruct","name":"Meta: Llama 3.1 405B Instruct","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3.1-70b-instruct","name":"Meta: Llama 3.1 70B Instruct","pricing":{"prompt":"0.0000001","completion":"0.00000028","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721692800,"top_provider":{"context_length":131072,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-nemo:free","name":"Mistral: Mistral Nemo (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":128000,"is_moderated":false}},{"id":"mistralai/mistral-nemo","name":"Mistral: Mistral Nemo","pricing":{"prompt":"0.000000008","completion":"0.000000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1721347200,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"openai/gpt-4o-mini","name":"OpenAI: 
GPT-4o-mini","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.000217","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o-mini-2024-07-18","name":"OpenAI: GPT-4o-mini (2024-07-18)","pricing":{"prompt":"0.00000015","completion":"0.0000006","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0","input_cache_read":"0.000000075"},"created":1721260800,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"google/gemma-2-27b-it","name":"Google: Gemma 2 27B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720828800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"alpindale/magnum-72b","name":"Magnum 72B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1720656000,"top_provider":{"context_length":16384,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemma-2-9b-it:free","name":"Google: Gemma 2 9B (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"google/gemma-2-9b-it","name":"Google: Gemma 2 9B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719532800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"01-ai/yi-large","name":"01.AI: Yi Large","pricing":{"prompt":"0.000003","completion":"0.000003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1719273600,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620:beta","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20) (self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"anthropic/claude-3.5-sonnet-20240620","name":"Anthropic: Claude 3.5 Sonnet (2024-06-20)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1718841600,"top_provider":{"context_length":200000,"max_completion_tokens":8192,"is_moderated":true}},{"id":"sao10k/l3-euryale-70b","name":"Sao10k: Llama 3 Euryale 70B v2.1","pricing":{"prompt":"0.00000148","completion":"0.00000148","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1718668800,"top_provider":{"context_length":8192,"max_completion_tokens":8192,"is_moderated":false}},{"id":"cognitivecomputations/dolphin-mixtral-8x22b","name":"Dolphin 2.9.2 Mixtral 8x22B 
🐬","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717804800,"top_provider":{"context_length":16000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"qwen/qwen-2-72b-instruct","name":"Qwen 2 72B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1717718400,"top_provider":{"context_length":32768,"max_completion_tokens":4096,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct:free","name":"Mistral: Mistral 7B Instruct (free)","pricing":{"prompt":"0","completion":"0","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct","name":"Mistral: Mistral 7B Instruct","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"nousresearch/hermes-2-pro-llama-3-8b","name":"NousResearch: Hermes 2 Pro - Llama-3 8B","pricing":{"prompt":"0.000000025","completion":"0.00000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":131072,"max_completion_tokens":131072,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.3","name":"Mistral: Mistral 7B Instruct v0.3","pricing":{"prompt":"0.000000028","completion":"0.000000054","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716768000,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"microsoft/phi-3-mini-128k-instruct","name":"Microsoft: Phi-3 Mini 128K Instruct","pricing":{"prompt":"0.0000001","completion":"0.0000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716681600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/phi-3-medium-128k-instruct","name":"Microsoft: Phi-3 Medium 128K Instruct","pricing":{"prompt":"0.000001","completion":"0.000001","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1716508800,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"neversleep/llama-3-lumimaid-70b","name":"NeverSleep: Llama 3 Lumimaid 70B","pricing":{"prompt":"0.000004","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715817600,"top_provider":{"context_length":8192,"max_completion_tokens":4096,"is_moderated":false}},{"id":"google/gemini-flash-1.5","name":"Google: Gemini 1.5 Flash ","pricing":{"prompt":"0.000000075","completion":"0.0000003","request":"0","image":"0.00004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000001875","input_cache_write":"0.0000001583"},"created":1715644800,"top_provider":{"context_length":1000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4o","name":"OpenAI: 
GPT-4o","pricing":{"prompt":"0.0000025","completion":"0.00001","request":"0","image":"0.003613","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000125"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":16384,"is_moderated":true}},{"id":"openai/gpt-4o:extended","name":"OpenAI: GPT-4o (extended)","pricing":{"prompt":"0.000006","completion":"0.000018","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":64000,"is_moderated":true}},{"id":"meta-llama/llama-guard-2-8b","name":"Meta: LlamaGuard 2 8B","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":8192,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4o-2024-05-13","name":"OpenAI: GPT-4o (2024-05-13)","pricing":{"prompt":"0.000005","completion":"0.000015","request":"0","image":"0.007225","web_search":"0","internal_reasoning":"0"},"created":1715558400,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"neversleep/llama-3-lumimaid-8b","name":"NeverSleep: Llama 3 Lumimaid 8B","pricing":{"prompt":"0.0000002","completion":"0.00000125","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1714780800,"top_provider":{"context_length":24576,"max_completion_tokens":2048,"is_moderated":false}},{"id":"sao10k/fimbulvetr-11b-v2","name":"Fimbulvetr 11B v2","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713657600,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"meta-llama/llama-3-8b-instruct","name":"Meta: Llama 3 8B Instruct","pricing":{"prompt":"0.00000003","completion":"0.00000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"meta-llama/llama-3-70b-instruct","name":"Meta: Llama 3 70B Instruct","pricing":{"prompt":"0.0000003","completion":"0.0000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713398400,"top_provider":{"context_length":8192,"max_completion_tokens":16384,"is_moderated":false}},{"id":"mistralai/mixtral-8x22b-instruct","name":"Mistral: Mixtral 8x22B Instruct","pricing":{"prompt":"0.0000009","completion":"0.0000009","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713312000,"top_provider":{"context_length":65536,"max_completion_tokens":null,"is_moderated":false}},{"id":"microsoft/wizardlm-2-8x22b","name":"WizardLM-2 8x22B","pricing":{"prompt":"0.00000048","completion":"0.00000048","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1713225600,"top_provider":{"context_length":65536,"max_completion_tokens":65536,"is_moderated":false}},{"id":"google/gemini-pro-1.5","name":"Google: Gemini 1.5 Pro","pricing":{"prompt":"0.00000125","completion":"0.000005","request":"0","image":"0.0006575","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":2000000,"max_completion_tokens":8192,"is_moderated":false}},{"id":"openai/gpt-4-turbo","name":"OpenAI: GPT-4 
Turbo","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0.01445","web_search":"0","internal_reasoning":"0"},"created":1712620800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-plus","name":"Cohere: Command R+","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712188800,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r-plus-04-2024","name":"Cohere: Command R+ (04-2024)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1712016000,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"sophosympatheia/midnight-rose-70b","name":"Midnight Rose 70B","pricing":{"prompt":"0.0000008","completion":"0.0000008","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1711065600,"top_provider":{"context_length":4096,"max_completion_tokens":2048,"is_moderated":false}},{"id":"cohere/command","name":"Cohere: Command","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":4096,"max_completion_tokens":4000,"is_moderated":true}},{"id":"cohere/command-r","name":"Cohere: Command R","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1710374400,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"anthropic/claude-3-haiku:beta","name":"Anthropic: Claude 3 Haiku (self-moderated)","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-haiku","name":"Anthropic: Claude 3 Haiku","pricing":{"prompt":"0.00000025","completion":"0.00000125","request":"0","image":"0.0004","web_search":"0","internal_reasoning":"0","input_cache_read":"0.00000003","input_cache_write":"0.0000003"},"created":1710288000,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-opus:beta","name":"Anthropic: Claude 3 Opus (self-moderated)","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-opus","name":"Anthropic: Claude 3 Opus","pricing":{"prompt":"0.000015","completion":"0.000075","request":"0","image":"0.024","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000015","input_cache_write":"0.00001875"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-3-sonnet:beta","name":"Anthropic: Claude 3 Sonnet 
(self-moderated)","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-3-sonnet","name":"Anthropic: Claude 3 Sonnet","pricing":{"prompt":"0.000003","completion":"0.000015","request":"0","image":"0.0048","web_search":"0","internal_reasoning":"0","input_cache_read":"0.0000003","input_cache_write":"0.00000375"},"created":1709596800,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"cohere/command-r-03-2024","name":"Cohere: Command R (03-2024)","pricing":{"prompt":"0.0000005","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1709341200,"top_provider":{"context_length":128000,"max_completion_tokens":4000,"is_moderated":true}},{"id":"mistralai/mistral-large","name":"Mistral Large","pricing":{"prompt":"0.000002","completion":"0.000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1708905600,"top_provider":{"context_length":128000,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-0613","name":"OpenAI: GPT-3.5 Turbo (older v0613)","pricing":{"prompt":"0.000001","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4-turbo-preview","name":"OpenAI: GPT-4 Turbo Preview","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1706140800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"nousresearch/nous-hermes-2-mixtral-8x7b-dpo","name":"Nous: Hermes 2 Mixtral 8x7B DPO","pricing":{"prompt":"0.0000006","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1705363200,"top_provider":{"context_length":32768,"max_completion_tokens":2048,"is_moderated":false}},{"id":"mistralai/mistral-small","name":"Mistral Small","pricing":{"prompt":"0.0000002","completion":"0.0000006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-tiny","name":"Mistral Tiny","pricing":{"prompt":"0.00000025","completion":"0.00000025","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1704844800,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mistral-7b-instruct-v0.2","name":"Mistral: Mistral 7B Instruct v0.2","pricing":{"prompt":"0.0000002","completion":"0.0000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1703721600,"top_provider":{"context_length":32768,"max_completion_tokens":null,"is_moderated":false}},{"id":"mistralai/mixtral-8x7b-instruct","name":"Mistral: Mixtral 8x7B Instruct","pricing":{"prompt":"0.00000008","completion":"0.00000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1702166400,"top_provider":{"context_length":32768,"max_completion_tokens":16384,"is_moderated":false}},{"id":"neversleep/noromaid-20b","name":"Noromaid 
20B","pricing":{"prompt":"0.00000125","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700956800,"top_provider":{"context_length":8192,"max_completion_tokens":2048,"is_moderated":false}},{"id":"anthropic/claude-2.1:beta","name":"Anthropic: Claude v2.1 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.1","name":"Anthropic: Claude v2.1","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"anthropic/claude-2:beta","name":"Anthropic: Claude v2 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2","name":"Anthropic: Claude v2","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1700611200,"top_provider":{"context_length":200000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"undi95/toppy-m-7b","name":"Toppy M 7B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"alpindale/goliath-120b","name":"Goliath 120B","pricing":{"prompt":"0.000009","completion":"0.000011","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699574400,"top_provider":{"context_length":6144,"max_completion_tokens":null,"is_moderated":false}},{"id":"openrouter/auto","name":"Auto Router","pricing":{"prompt":"-1","completion":"-1"},"created":1699401600,"top_provider":{"context_length":null,"max_completion_tokens":null,"is_moderated":false}},{"id":"openai/gpt-4-1106-preview","name":"OpenAI: GPT-4 Turbo (older v1106)","pricing":{"prompt":"0.00001","completion":"0.00003","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1699228800,"top_provider":{"context_length":128000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-3.5-turbo-instruct","name":"OpenAI: GPT-3.5 Turbo Instruct","pricing":{"prompt":"0.0000015","completion":"0.000002","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":4095,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mistralai/mistral-7b-instruct-v0.1","name":"Mistral: Mistral 7B Instruct v0.1","pricing":{"prompt":"0.00000011","completion":"0.00000019","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1695859200,"top_provider":{"context_length":2824,"max_completion_tokens":null,"is_moderated":false}},{"id":"pygmalionai/mythalion-13b","name":"Pygmalion: Mythalion 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693612800,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-3.5-turbo-16k","name":"OpenAI: GPT-3.5 
Turbo 16k","pricing":{"prompt":"0.000003","completion":"0.000004","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1693180800,"top_provider":{"context_length":16385,"max_completion_tokens":4096,"is_moderated":true}},{"id":"mancer/weaver","name":"Mancer: Weaver (alpha)","pricing":{"prompt":"0.0000015","completion":"0.0000015","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690934400,"top_provider":{"context_length":8000,"max_completion_tokens":1000,"is_moderated":false}},{"id":"anthropic/claude-2.0:beta","name":"Anthropic: Claude v2.0 (self-moderated)","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":false}},{"id":"anthropic/claude-2.0","name":"Anthropic: Claude v2.0","pricing":{"prompt":"0.000008","completion":"0.000024","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1690502400,"top_provider":{"context_length":100000,"max_completion_tokens":4096,"is_moderated":true}},{"id":"undi95/remm-slerp-l2-13b","name":"ReMM SLERP 13B","pricing":{"prompt":"0.0000008","completion":"0.0000012","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1689984000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"gryphe/mythomax-l2-13b","name":"MythoMax 13B","pricing":{"prompt":"0.000000065","completion":"0.000000065","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1688256000,"top_provider":{"context_length":4096,"max_completion_tokens":4096,"is_moderated":false}},{"id":"openai/gpt-4","name":"OpenAI: GPT-4","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}},{"id":"openai/gpt-4-0314","name":"OpenAI: GPT-4 (older v0314)","pricing":{"prompt":"0.00003","completion":"0.00006","request":"0","image":"0","web_search":"0","internal_reasoning":"0"},"created":1685232000,"top_provider":{"context_length":8191,"max_completion_tokens":4096,"is_moderated":true}}] \ No newline at end of file diff --git a/packages/kbot/dist/package-lock.json b/packages/kbot/dist/package-lock.json index a2b5fe36..55d1f979 100644 --- a/packages/kbot/dist/package-lock.json +++ b/packages/kbot/dist/package-lock.json @@ -1,12 +1,12 @@ { "name": "@plastichub/kbot", - "version": "1.1.43", + "version": "1.1.44", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@plastichub/kbot", - "version": "1.1.43", + "version": "1.1.44", "license": "ISC", "dependencies": { "node-emoji": "^2.2.0" diff --git a/packages/kbot/dist/package.json b/packages/kbot/dist/package.json index 43be36e6..191bcab0 100644 --- a/packages/kbot/dist/package.json +++ b/packages/kbot/dist/package.json @@ -1,6 +1,6 @@ { "name": "@plastichub/kbot", - "version": "1.1.43", + "version": "1.1.44", "main": "main_node.js", "author": "", "license": "ISC", diff --git a/packages/kbot/src/models/cache/openrouter-models-free.ts b/packages/kbot/src/models/cache/openrouter-models-free.ts index c7a7a02f..a7a1e898 100644 --- a/packages/kbot/src/models/cache/openrouter-models-free.ts +++ b/packages/kbot/src/models/cache/openrouter-models-free.ts @@ -48,9 +48,7 @@ export enum E_OPENROUTER_MODEL_FREE { 
MODEL_FREE_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free", MODEL_FREE_QWEN_QWEN_2_5_CODER_32B_INSTRUCT_FREE = "qwen/qwen-2.5-coder-32b-instruct:free", MODEL_FREE_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", - MODEL_FREE_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", MODEL_FREE_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", - MODEL_FREE_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", MODEL_FREE_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free" diff --git a/packages/kbot/src/models/cache/openrouter-models.ts b/packages/kbot/src/models/cache/openrouter-models.ts index 440ca416..a441828f 100644 --- a/packages/kbot/src/models/cache/openrouter-models.ts +++ b/packages/kbot/src/models/cache/openrouter-models.ts @@ -46,7 +46,6 @@ export enum E_OPENROUTER_MODEL { MODEL_MICROSOFT_PHI_4_REASONING_PLUS = "microsoft/phi-4-reasoning-plus", MODEL_INCEPTION_MERCURY_CODER = "inception/mercury-coder", MODEL_OPENGVLAB_INTERNVL3_14B = "opengvlab/internvl3-14b", - MODEL_OPENGVLAB_INTERNVL3_2B = "opengvlab/internvl3-2b", MODEL_DEEPSEEK_DEEPSEEK_PROVER_V2 = "deepseek/deepseek-prover-v2", MODEL_META_LLAMA_LLAMA_GUARD_4_12B = "meta-llama/llama-guard-4-12b", MODEL_QWEN_QWEN3_30B_A3B_FREE = "qwen/qwen3-30b-a3b:free", @@ -60,10 +59,8 @@ export enum E_OPENROUTER_MODEL { MODEL_QWEN_QWEN3_235B_A22B_FREE = "qwen/qwen3-235b-a22b:free", MODEL_QWEN_QWEN3_235B_A22B = "qwen/qwen3-235b-a22b", MODEL_TNGTECH_DEEPSEEK_R1T_CHIMERA_FREE = "tngtech/deepseek-r1t-chimera:free", - MODEL_THUDM_GLM_Z1_RUMINATION_32B = "thudm/glm-z1-rumination-32b", MODEL_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free", MODEL_THUDM_GLM_Z1_32B_FREE = "thudm/glm-z1-32b:free", - MODEL_THUDM_GLM_Z1_32B = "thudm/glm-z1-32b", MODEL_THUDM_GLM_4_32B_FREE = "thudm/glm-4-32b:free", MODEL_THUDM_GLM_4_32B = "thudm/glm-4-32b", MODEL_GOOGLE_GEMINI_2_5_FLASH_PREVIEW = "google/gemini-2.5-flash-preview", @@ -126,8 +123,8 @@ export enum E_OPENROUTER_MODEL { MODEL_OPENAI_GPT_4_5_PREVIEW = "openai/gpt-4.5-preview", MODEL_GOOGLE_GEMINI_2_0_FLASH_LITE_001 = "google/gemini-2.0-flash-lite-001", MODEL_ANTHROPIC_CLAUDE_3_7_SONNET = "anthropic/claude-3.7-sonnet", - MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta", MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_THINKING = "anthropic/claude-3.7-sonnet:thinking", + MODEL_ANTHROPIC_CLAUDE_3_7_SONNET_BETA = "anthropic/claude-3.7-sonnet:beta", MODEL_PERPLEXITY_R1_1776 = "perplexity/r1-1776", MODEL_MISTRALAI_MISTRAL_SABA = "mistralai/mistral-saba", MODEL_COGNITIVECOMPUTATIONS_DOLPHIN3_0_R1_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free", @@ -191,43 +188,41 @@ export enum E_OPENROUTER_MODEL { MODEL_RAIFLE_SORCERERLM_8X22B = "raifle/sorcererlm-8x22b", MODEL_EVA_UNIT_01_EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b", MODEL_THEDRUMMER_UNSLOPNEMO_12B = "thedrummer/unslopnemo-12b", - MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta", - MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_BETA = "anthropic/claude-3.5-haiku:beta", MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU = "anthropic/claude-3.5-haiku", - MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = 
"anthracite-org/magnum-v4-72b", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022_BETA = "anthropic/claude-3.5-haiku-20241022:beta", + MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022", MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_70B = "neversleep/llama-3.1-lumimaid-70b", + MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b", MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_BETA = "anthropic/claude-3.5-sonnet:beta", MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet", - MODEL_X_AI_GROK_BETA = "x-ai/grok-beta", - MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b", + MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b", MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct", MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct", MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity", MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi", MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b", - MODEL_LIQUID_LFM_40B = "liquid/lfm-40b", MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b", MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b", - MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", - MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", - MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT_FREE = "meta-llama/llama-3.2-1b-instruct:free", + MODEL_LIQUID_LFM_40B = "liquid/lfm-40b", + MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct", MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct", - MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT_FREE = "meta-llama/llama-3.2-11b-vision-instruct:free", + MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free", MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct", MODEL_NEVERSLEEP_LLAMA_3_1_LUMIMAID_8B = "neversleep/llama-3.1-lumimaid-8b", - MODEL_OPENAI_O1_MINI = "openai/o1-mini", - MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12", - MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_OPENAI_O1_PREVIEW = "openai/o1-preview", + MODEL_OPENAI_O1_PREVIEW_2024_09_12 = "openai/o1-preview-2024-09-12", + MODEL_OPENAI_O1_MINI = "openai/o1-mini", + MODEL_OPENAI_O1_MINI_2024_09_12 = "openai/o1-mini-2024-09-12", MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b", - MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024", - MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", + MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024", MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct", + MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b", MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct", MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b", MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b", @@ -235,18 +230,17 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_LUNARIS_8B = "sao10k/l3-lunaris-8b", 
MODEL_AETHERWIING_MN_STARCANNON_12B = "aetherwiing/mn-starcannon-12b", MODEL_OPENAI_GPT_4O_2024_08_06 = "openai/gpt-4o-2024-08-06", - MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b", MODEL_META_LLAMA_LLAMA_3_1_405B = "meta-llama/llama-3.1-405b", + MODEL_NOTHINGIISREAL_MN_CELESTE_12B = "nothingiisreal/mn-celeste-12b", MODEL_PERPLEXITY_LLAMA_3_1_SONAR_SMALL_128K_ONLINE = "perplexity/llama-3.1-sonar-small-128k-online", MODEL_PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = "perplexity/llama-3.1-sonar-large-128k-online", - MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", - MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", - MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT_FREE = "meta-llama/llama-3.1-8b-instruct:free", MODEL_META_LLAMA_LLAMA_3_1_8B_INSTRUCT = "meta-llama/llama-3.1-8b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_405B_INSTRUCT = "meta-llama/llama-3.1-405b-instruct", + MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct", MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free", MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo", - MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini", + MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18", MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it", MODEL_ALPINDALE_MAGNUM_72B = "alpindale/magnum-72b", MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free", @@ -257,17 +251,17 @@ export enum E_OPENROUTER_MODEL { MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b", MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b", MODEL_QWEN_QWEN_2_72B_INSTRUCT = "qwen/qwen-2-72b-instruct", - MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", - MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct", + MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b", + MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3", MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct", MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct", MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b", MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5", - MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", MODEL_OPENAI_GPT_4O = "openai/gpt-4o", MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended", + MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b", MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13", MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_8B = "neversleep/llama-3-lumimaid-8b", MODEL_SAO10K_FIMBULVETR_11B_V2 = "sao10k/fimbulvetr-11b-v2", @@ -275,33 +269,33 @@ export enum E_OPENROUTER_MODEL { MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct", MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct", MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b", - MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5", + MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo", MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus", MODEL_COHERE_COMMAND_R_PLUS_04_2024 = 
"cohere/command-r-plus-04-2024", MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b", - MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_COHERE_COMMAND = "cohere/command", + MODEL_COHERE_COMMAND_R = "cohere/command-r", MODEL_ANTHROPIC_CLAUDE_3_HAIKU_BETA = "anthropic/claude-3-haiku:beta", MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku", - MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA = "anthropic/claude-3-sonnet:beta", - MODEL_ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet", MODEL_ANTHROPIC_CLAUDE_3_OPUS_BETA = "anthropic/claude-3-opus:beta", MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus", + MODEL_ANTHROPIC_CLAUDE_3_SONNET_BETA = "anthropic/claude-3-sonnet:beta", + MODEL_ANTHROPIC_CLAUDE_3_SONNET = "anthropic/claude-3-sonnet", MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024", MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large", MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613", MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview", MODEL_NOUSRESEARCH_NOUS_HERMES_2_MIXTRAL_8X7B_DPO = "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", - MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small", + MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny", MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_2 = "mistralai/mistral-7b-instruct-v0.2", MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct", MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b", - MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta", - MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2", MODEL_ANTHROPIC_CLAUDE_2_1_BETA = "anthropic/claude-2.1:beta", MODEL_ANTHROPIC_CLAUDE_2_1 = "anthropic/claude-2.1", + MODEL_ANTHROPIC_CLAUDE_2_BETA = "anthropic/claude-2:beta", + MODEL_ANTHROPIC_CLAUDE_2 = "anthropic/claude-2", MODEL_UNDI95_TOPPY_M_7B = "undi95/toppy-m-7b", MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b", MODEL_OPENROUTER_AUTO = "openrouter/auto",