maintenance love :)

This commit is contained in:
babayaga 2025-09-21 17:54:24 +02:00
parent 611af4a0be
commit 7eb25bf342
62 changed files with 108187 additions and 1110 deletions

Binary file not shown (previous size 122 KiB).

File diff suppressed because one or more lines are too long

View File

@ -1,5 +1,5 @@
{
"timestamp": 1758221595993,
"timestamp": 1758470050446,
"models": [
{
"id": "gpt-4-0613",

View File

@ -1,6 +1,55 @@
{
"timestamp": 1758221596257,
"timestamp": 1758470050775,
"models": [
{
"id": "x-ai/grok-4-fast:free",
"canonical_slug": "x-ai/grok-4-fast",
"hugging_face_id": "",
"name": "xAI: Grok 4 Fast (free)",
"created": 1758240090,
"description": "Grok 4 Fast is xAI's latest multimodal model with SOTA cost-efficiency and a 2M token context window. It comes in two flavors: non-reasoning and reasoning. Read more about the model on xAI's [news post](http://x.ai/news/grok-4-fast). Reasoning can be enabled using the `reasoning` `enabled` parameter in the API. [Learn more in our docs](https://openrouter.ai/docs/use-cases/reasoning-tokens#controlling-reasoning-tokens)\n\nPrompts and completions may be used by xAI or OpenRouter to improve future models.",
"context_length": 2000000,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
"text",
"image"
],
"output_modalities": [
"text"
],
"tokenizer": "Grok",
"instruct_type": null
},
"pricing": {
"prompt": "0",
"completion": "0",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
"context_length": 2000000,
"max_completion_tokens": 30000,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"include_reasoning",
"logprobs",
"max_tokens",
"reasoning",
"response_format",
"seed",
"structured_outputs",
"temperature",
"tool_choice",
"tools",
"top_logprobs",
"top_p"
]
},
{
"id": "alibaba/tongyi-deepresearch-30b-a3b",
"canonical_slug": "alibaba/tongyi-deepresearch-30b-a3b",
@ -581,92 +630,6 @@
"top_p"
]
},
{
"id": "openrouter/sonoma-dusk-alpha",
"canonical_slug": "openrouter/sonoma-dusk-alpha",
"hugging_face_id": "",
"name": "Sonoma Dusk Alpha",
"created": 1757093247,
"description": "This is a cloaked model provided to the community to gather feedback. A fast and intelligent general-purpose frontier model with a 2 million token context window. Supports image inputs and parallel tool calling.\n\nNote: Its free to use during this testing period, and prompts and completions are logged by the model creator for feedback and training.",
"context_length": 2000000,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
"text",
"image"
],
"output_modalities": [
"text"
],
"tokenizer": "Other",
"instruct_type": null
},
"pricing": {
"prompt": "0",
"completion": "0",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
"context_length": 2000000,
"max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"max_tokens",
"response_format",
"structured_outputs",
"tool_choice",
"tools"
]
},
{
"id": "openrouter/sonoma-sky-alpha",
"canonical_slug": "openrouter/sonoma-sky-alpha",
"hugging_face_id": "",
"name": "Sonoma Sky Alpha",
"created": 1757093001,
"description": "This is a cloaked model provided to the community to gather feedback. A maximally intelligent general-purpose frontier model with a 2 million token context window. Supports image inputs and parallel tool calling.\n\nNote: Its free to use during this testing period, and prompts and completions are logged by the model creator for feedback and training.",
"context_length": 2000000,
"architecture": {
"modality": "text+image->text",
"input_modalities": [
"text",
"image"
],
"output_modalities": [
"text"
],
"tokenizer": "Other",
"instruct_type": null
},
"pricing": {
"prompt": "0",
"completion": "0",
"request": "0",
"image": "0",
"web_search": "0",
"internal_reasoning": "0"
},
"top_provider": {
"context_length": 2000000,
"max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null,
"supported_parameters": [
"include_reasoning",
"max_tokens",
"reasoning",
"response_format",
"structured_outputs",
"tool_choice",
"tools"
]
},
{
"id": "qwen/qwen3-max",
"canonical_slug": "qwen/qwen3-max",
@ -2038,7 +2001,7 @@
"name": "OpenAI: gpt-oss-20b",
"created": 1754414229,
"description": "gpt-oss-20b is an open-weight 21B parameter model released by OpenAI under the Apache 2.0 license. It uses a Mixture-of-Experts (MoE) architecture with 3.6B active parameters per forward pass, optimized for lower-latency inference and deployability on consumer or single-GPU hardware. The model is trained in OpenAIs Harmony response format and supports reasoning level configuration, fine-tuning, and agentic capabilities including function calling, tool use, and structured outputs.",
"context_length": 131000,
"context_length": 131072,
"architecture": {
"modality": "text->text",
"input_modalities": [
@ -2051,7 +2014,7 @@
"instruct_type": null
},
"pricing": {
"prompt": "0.00000004",
"prompt": "0.00000003",
"completion": "0.00000015",
"request": "0",
"image": "0",
@ -2059,8 +2022,8 @@
"internal_reasoning": "0"
},
"top_provider": {
"context_length": 131000,
"max_completion_tokens": 131000,
"context_length": 131072,
"max_completion_tokens": 32768,
"is_moderated": false
},
"per_request_limits": null,
@ -7084,8 +7047,8 @@
"instruct_type": null
},
"pricing": {
"prompt": "0.00000003",
"completion": "0.00000012",
"prompt": "0.00000002",
"completion": "0.00000007",
"request": "0",
"image": "0",
"web_search": "0",
@ -10916,7 +10879,7 @@
},
"top_provider": {
"context_length": 131072,
"max_completion_tokens": 8192,
"max_completion_tokens": 131072,
"is_moderated": false
},
"per_request_limits": null,

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,6 @@
export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",
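
For reference, a minimal usage sketch of the regenerated enum; the import path is an assumption based on the source map in the compiled output below.

// Hypothetical import path; adjust to wherever the generated enum is exported from.
import { E_OPENROUTER_MODEL_FREE } from './models/cache/openrouter-models-free';

const model: string = E_OPENROUTER_MODEL_FREE.MODEL_FREE_X_AI_GROK_4_FAST_FREE;
// "x-ai/grok-4-fast:free" — the removed Sonoma members no longer exist on the enum.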

View File

@ -1,8 +1,7 @@
export var E_OPENROUTER_MODEL_FREE;
(function (E_OPENROUTER_MODEL_FREE) {
E_OPENROUTER_MODEL_FREE["MODEL_FREE_X_AI_GROK_4_FAST_FREE"] = "x-ai/grok-4-fast:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE"] = "nvidia/nemotron-nano-9b-v2:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA"] = "openrouter/sonoma-dusk-alpha";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA"] = "openrouter/sonoma-sky-alpha";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE"] = "deepseek/deepseek-chat-v3.1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_120B_FREE"] = "openai/gpt-oss-120b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_OPENAI_GPT_OSS_20B_FREE"] = "openai/gpt-oss-20b:free";
@ -57,4 +56,4 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free";
})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {}));
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkF5RFg7QUF6REQsV0FBWSx1QkFBdUI7SUFDakMseUdBQThFLENBQUE7SUFDOUUsbUdBQXdFLENBQUE7SUFDeEUsaUdBQXNFLENBQUE7SUFDdEUsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQXpEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBeURsQyJ9
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3BlbnJvdXRlci1tb2RlbHMtZnJlZS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9tb2RlbHMvY2FjaGUvb3BlbnJvdXRlci1tb2RlbHMtZnJlZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQU4sSUFBWSx1QkF3RFg7QUF4REQsV0FBWSx1QkFBdUI7SUFDakMscUZBQTBELENBQUE7SUFDMUQseUdBQThFLENBQUE7SUFDOUUsMkdBQWdGLENBQUE7SUFDaEYsMkZBQWdFLENBQUE7SUFDaEUseUZBQThELENBQUE7SUFDOUQscUZBQTBELENBQUE7SUFDMUQscUZBQTBELENBQUE7SUFDMUQseUZBQThELENBQUE7SUFDOUQscUtBQTBJLENBQUE7SUFDMUksaUdBQXNFLENBQUE7SUFDdEUsK0dBQW9GLENBQUE7SUFDcEYsK0dBQW9GLENBQUE7SUFDcEYscUlBQTBHLENBQUE7SUFDMUcsbUdBQXdFLENBQUE7SUFDeEUseUhBQThGLENBQUE7SUFDOUYsdUdBQTRFLENBQUE7SUFDNUUsK0dBQW9GLENBQUE7SUFDcEYsaUdBQXNFLENBQUE7SUFDdEUscUhBQTBGLENBQUE7SUFDMUYsK0VBQW9ELENBQUE7SUFDcEQseUZBQThELENBQUE7SUFDOUQsK0VBQW9ELENBQUE7SUFDcEQsaUZBQXNELENBQUE7SUFDdEQsNkZBQWtFLENBQUE7SUFDbEUsNkdBQWtGLENBQUE7SUFDbEYsMkZBQWdFLENBQUE7SUFDaEUsaUhBQXNGLENBQUE7SUFDdEYsNkdBQWtGLENBQUE7SUFDbEYseUhBQThGLENBQUE7SUFDOUYsbUhBQXdGLENBQUE7SUFDeEYsMkdBQWdGLENBQUE7SUFDaEYscUdBQTBFLENBQUE7SUFDMUUsNkdBQWtGLENBQUE7SUFDbEYsaUhBQXNGLENBQUE7SUFDdEYscUlBQTBHLENBQUE7SUFDMUcsNkZBQWtFLENBQUE7SUFDbEUsK0ZBQW9FLENBQUE7SUFDcEUsK0ZBQW9FLENBQUE7SUFDcEUsNkVBQWtELENBQUE7SUFDbEQsNklBQWtILENBQUE7SUFDbEgsbUpBQXdILENBQUE7SUFDeEgsNklBQWtILENBQUE7SUFDbEgsNkdBQWtGLENBQUE7SUFDbEYsdUlBQTRHLENBQUE7SUFDNUcsaUlBQXNHLENBQUE7SUFDdEcsNkZBQWtFLENBQUE7SUFDbEUsMkdBQWdGLENBQUE7SUFDaEYsdUhBQTRGLENBQUE7SUFDNUYscUhBQTBGLENBQUE7SUFDMUYscUhBQTBGLENBQUE7SUFDMUYseUdBQThFLENBQUE7SUFDOUUseUhBQThGLENBQUE7SUFDOUYsaUdBQXNFLENBQUE7SUFDdEUsNkZBQWtFLENBQUE7SUFDbEUsK0dBQW9GLENBQUE7QUFDdEYsQ0FBQyxFQXhEVyx1QkFBdUIsS0FBdkIsdUJBQXVCLFFBd0RsQyJ9

View File

@ -1,4 +1,5 @@
export declare enum E_OPENROUTER_MODEL {
MODEL_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
MODEL_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B = "alibaba/tongyi-deepresearch-30b-a3b",
MODEL_QWEN_QWEN3_CODER_FLASH = "qwen/qwen3-coder-flash",
MODEL_QWEN_QWEN3_CODER_PLUS = "qwen/qwen3-coder-plus",
@ -11,8 +12,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2",
MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905",
MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct",

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -55,6 +55,7 @@ export interface IKBotOptions {
anthropic/claude-opus-4 | paid
anthropic/claude-opus-4.1 | paid
anthropic/claude-sonnet-4 | paid
arcee-ai/afm-4.5b | paid
arcee-ai/coder-large | paid
arcee-ai/maestro-reasoning | paid
arcee-ai/spotlight | paid
@ -96,9 +97,7 @@ export interface IKBotOptions {
deepseek/deepseek-r1-distill-llama-70b:free | free
deepseek/deepseek-r1-distill-llama-8b | paid
deepseek/deepseek-r1-distill-qwen-14b | paid
deepseek/deepseek-r1-distill-qwen-14b:free | free
deepseek/deepseek-r1-distill-qwen-32b | paid
cognitivecomputations/dolphin-mixtral-8x22b | paid
cognitivecomputations/dolphin3.0-mistral-24b | paid
cognitivecomputations/dolphin3.0-mistral-24b:free | free
cognitivecomputations/dolphin3.0-r1-mistral-24b | paid
@ -170,7 +169,6 @@ export interface IKBotOptions {
microsoft/phi-3-medium-128k-instruct | paid
microsoft/phi-3-mini-128k-instruct | paid
microsoft/phi-3.5-mini-128k-instruct | paid
sophosympatheia/midnight-rose-70b | paid
minimax/minimax-m1 | paid
minimax/minimax-01 | paid
mistralai/mistral-large | paid
@ -230,7 +228,6 @@ export interface IKBotOptions {
nousresearch/hermes-2-pro-llama-3-8b | paid
nvidia/llama-3.1-nemotron-70b-instruct | paid
nvidia/llama-3.1-nemotron-ultra-253b-v1 | paid
nvidia/llama-3.1-nemotron-ultra-253b-v1:free | free
nvidia/nemotron-nano-9b-v2 | paid
nvidia/nemotron-nano-9b-v2:free | free
openai/chatgpt-4o-latest | paid
@ -275,6 +272,7 @@ export interface IKBotOptions {
openai/o3-pro | paid
openai/o4-mini | paid
openai/o4-mini-high | paid
opengvlab/internvl3-78b | paid
perplexity/r1-1776 | paid
perplexity/sonar | paid
perplexity/sonar-deep-research | paid
@ -310,6 +308,8 @@ export interface IKBotOptions {
qwen/qwen3-coder-30b-a3b-instruct | paid
qwen/qwen3-coder | paid
qwen/qwen3-coder:free | free
qwen/qwen3-coder-flash | paid
qwen/qwen3-coder-plus | paid
qwen/qwen3-max | paid
qwen/qwen3-next-80b-a3b-instruct | paid
qwen/qwen3-next-80b-a3b-thinking | paid
@ -321,7 +321,6 @@ export interface IKBotOptions {
qwen/qwen-2.5-7b-instruct | paid
qwen/qwen-2.5-coder-32b-instruct | paid
qwen/qwen-2.5-coder-32b-instruct:free | free
rekaai/reka-flash-3:free | free
undi95/remm-slerp-l2-13b | paid
sao10k/l3-lunaris-8b | paid
sao10k/l3-euryale-70b | paid
@ -329,8 +328,6 @@ export interface IKBotOptions {
sao10k/l3.3-euryale-70b | paid
shisa-ai/shisa-v2-llama3.3-70b | paid
shisa-ai/shisa-v2-llama3.3-70b:free | free
openrouter/sonoma-dusk-alpha | paid
openrouter/sonoma-sky-alpha | paid
raifle/sorcererlm-8x22b | paid
stepfun-ai/step3 | paid
switchpoint/router | paid
@ -341,21 +338,20 @@ export interface IKBotOptions {
thedrummer/rocinante-12b | paid
thedrummer/skyfall-36b-v2 | paid
thedrummer/unslopnemo-12b | paid
thudm/glm-4-32b | paid
thudm/glm-4.1v-9b-thinking | paid
thudm/glm-z1-32b | paid
tngtech/deepseek-r1t-chimera | paid
tngtech/deepseek-r1t-chimera:free | free
tngtech/deepseek-r1t2-chimera:free | free
alibaba/tongyi-deepresearch-30b-a3b | paid
cognitivecomputations/dolphin-mistral-24b-venice-edition:free | free
microsoft/wizardlm-2-8x22b | paid
x-ai/grok-2-1212 | paid
x-ai/grok-2-vision-1212 | paid
x-ai/grok-3 | paid
x-ai/grok-3-beta | paid
x-ai/grok-3-mini | paid
x-ai/grok-3-mini-beta | paid
x-ai/grok-4 | paid
x-ai/grok-4-fast:free | free
x-ai/grok-code-fast-1 | paid
z-ai/glm-4-32b | paid
z-ai/glm-4.5 | paid

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,12 +1,12 @@
{
"name": "@plastichub/kbot",
"version": "1.1.53",
"version": "1.1.54",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@plastichub/kbot",
"version": "1.1.53",
"version": "1.1.54",
"license": "ISC",
"dependencies": {
"node-emoji": "^2.2.0"

View File

@ -1,6 +1,6 @@
{
"name": "@plastichub/kbot",
"version": "1.1.53",
"version": "1.1.54",
"main": "main_node.js",
"author": "",
"license": "ISC",

Binary files not shown (31 files; 29 are images with previous sizes between 1 MiB and 2.5 MiB).

View File

@ -52,9 +52,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.99"
version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]]
name = "ashpd"
@ -400,11 +400,11 @@ dependencies = [
[[package]]
name = "camino"
version = "1.1.12"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd0b03af37dad7a14518b7691d81acb0f8222604ad3d1b02f6b4bed5188c0cd5"
checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603"
dependencies = [
"serde",
"serde_core",
]
[[package]]
@ -437,14 +437,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "374b7c592d9c00c1f4972ea58390ac6b18cbb6ab79011f3bdc90a0b82ca06b77"
dependencies = [
"serde",
"toml 0.9.5",
"toml 0.9.7",
]
[[package]]
name = "cc"
version = "1.2.37"
version = "1.2.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65193589c6404eb80b450d618eaf9a2cafaaafd57ecce47370519ef674a7bd44"
checksum = "80f41ae168f955c12fb8960b057d70d0ca153fb83182b57d86380443527be7e9"
dependencies = [
"find-msvc-tools",
"shlex",
@ -687,9 +687,9 @@ dependencies = [
[[package]]
name = "darling"
version = "0.20.11"
version = "0.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
dependencies = [
"darling_core",
"darling_macro",
@ -697,9 +697,9 @@ dependencies = [
[[package]]
name = "darling_core"
version = "0.20.11"
version = "0.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4"
dependencies = [
"fnv",
"ident_case",
@ -711,9 +711,9 @@ dependencies = [
[[package]]
name = "darling_macro"
version = "0.20.11"
version = "0.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
dependencies = [
"darling_core",
"quote",
@ -836,7 +836,7 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412"
dependencies = [
"libloading",
"libloading 0.8.9",
]
[[package]]
@ -915,14 +915,14 @@ checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]]
name = "embed-resource"
version = "3.0.5"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6d81016d6c977deefb2ef8d8290da019e27cc26167e102185da528e6c0ab38"
checksum = "55a075fc573c64510038d7ee9abc7990635863992f83ebc52c8b433b8411a02e"
dependencies = [
"cc",
"memchr",
"rustc_version",
"toml 0.9.5",
"toml 0.9.7",
"vswhom",
"winreg",
]
@ -977,10 +977,11 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "erased-serde"
version = "0.4.7"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "110ca254af04e46794fcc4be0991e72e13fdd8c78119e02c76a5473f6f74e049"
checksum = "259d404d09818dec19332e31d94558aeb442fea04c817006456c24b5460bbd4b"
dependencies = [
"serde",
"serde_core",
"typeid",
]
@ -1043,9 +1044,9 @@ dependencies = [
[[package]]
name = "find-msvc-tools"
version = "0.1.1"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fd99930f64d146689264c637b5af2f0233a933bef0d8570e2526bf9e083192d"
checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959"
[[package]]
name = "flate2"
@ -1346,7 +1347,7 @@ dependencies = [
"js-sys",
"libc",
"r-efi",
"wasi 0.14.5+wasi-0.2.4",
"wasi 0.14.7+wasi-0.2.4",
"wasm-bindgen",
]
@ -1418,7 +1419,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc"
dependencies = [
"heck 0.4.1",
"proc-macro-crate 2.0.0",
"proc-macro-crate 2.0.2",
"proc-macro-error",
"proc-macro2",
"quote",
@ -1516,7 +1517,7 @@ dependencies = [
"futures-core",
"futures-sink",
"http",
"indexmap 2.11.1",
"indexmap 2.11.4",
"slab",
"tokio",
"tokio-util",
@ -1531,9 +1532,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.15.5"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
[[package]]
name = "heck"
@ -1658,9 +1659,9 @@ dependencies = [
[[package]]
name = "hyper-util"
version = "0.1.16"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e"
checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8"
dependencies = [
"base64 0.22.1",
"bytes",
@ -1842,13 +1843,14 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.11.1"
version = "2.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "206a8042aec68fa4a62e8d3f7aa4ceb508177d9324faf261e1959e495b7a1921"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [
"equivalent",
"hashbrown 0.15.5",
"hashbrown 0.16.0",
"serde",
"serde_core",
]
[[package]]
@ -1959,9 +1961,9 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "js-sys"
version = "0.3.78"
version = "0.3.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c0b063578492ceec17683ef2f8c5e89121fbd0b172cbc280635ab7567db2738"
checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e"
dependencies = [
"once_cell",
"wasm-bindgen",
@ -2008,7 +2010,7 @@ checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2"
dependencies = [
"cssparser",
"html5ever",
"indexmap 2.11.1",
"indexmap 2.11.4",
"selectors",
]
@ -2038,7 +2040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e9ec52138abedcc58dc17a7c6c0c00a2bdb4f3427c7f63fa97fd0d859155caf"
dependencies = [
"gtk-sys",
"libloading",
"libloading 0.7.4",
"once_cell",
]
@ -2059,10 +2061,20 @@ dependencies = [
]
[[package]]
name = "libredox"
version = "0.1.9"
name = "libloading"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55"
dependencies = [
"cfg-if",
"windows-link 0.2.0",
]
[[package]]
name = "libredox"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
dependencies = [
"bitflags 2.9.4",
"libc",
@ -2294,7 +2306,7 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d"
dependencies = [
"proc-macro-crate 3.3.0",
"proc-macro-crate 3.4.0",
"proc-macro2",
"quote",
"syn 2.0.106",
@ -2811,12 +2823,12 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "plist"
version = "1.7.4"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1"
checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07"
dependencies = [
"base64 0.22.1",
"indexmap 2.11.1",
"indexmap 2.11.4",
"quick-xml 0.38.3",
"serde",
"time",
@ -2891,20 +2903,21 @@ dependencies = [
[[package]]
name = "proc-macro-crate"
version = "2.0.0"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8"
checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24"
dependencies = [
"toml_edit 0.20.7",
"toml_datetime 0.6.3",
"toml_edit 0.20.2",
]
[[package]]
name = "proc-macro-crate"
version = "3.3.0"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35"
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
dependencies = [
"toml_edit 0.22.27",
"toml_edit 0.23.6",
]
[[package]]
@ -3367,9 +3380,9 @@ dependencies = [
[[package]]
name = "rustls"
version = "0.23.31"
version = "0.23.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc"
checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40"
dependencies = [
"once_cell",
"ring",
@ -3391,9 +3404,9 @@ dependencies = [
[[package]]
name = "rustls-webpki"
version = "0.103.5"
version = "0.103.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5a37813727b78798e53c2bec3f5e8fe12a6d6f8389bf9ca7802add4c9905ad8"
checksum = "8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb"
dependencies = [
"ring",
"rustls-pki-types",
@ -3504,18 +3517,19 @@ dependencies = [
[[package]]
name = "semver"
version = "1.0.26"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
dependencies = [
"serde",
"serde_core",
]
[[package]]
name = "serde"
version = "1.0.221"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "341877e04a22458705eb4e131a1508483c877dca2792b3781d4e5d8a6019ec43"
checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
dependencies = [
"serde_core",
"serde_derive",
@ -3523,29 +3537,30 @@ dependencies = [
[[package]]
name = "serde-untagged"
version = "0.1.8"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34836a629bcbc6f1afdf0907a744870039b1e14c0561cb26094fa683b158eff3"
checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058"
dependencies = [
"erased-serde",
"serde",
"serde_core",
"typeid",
]
[[package]]
name = "serde_core"
version = "1.0.221"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c459bc0a14c840cb403fc14b148620de1e0778c96ecd6e0c8c3cacb6d8d00fe"
checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.221"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6185cf75117e20e62b1ff867b9518577271e58abe0037c40bb4794969355ab0"
checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
dependencies = [
"proc-macro2",
"quote",
@ -3565,13 +3580,14 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.144"
version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56177480b00303e689183f110b4e727bb4211d692c62d4fcd16d02be93077d40"
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
"serde_core",
]
@ -3597,11 +3613,11 @@ dependencies = [
[[package]]
name = "serde_spanned"
version = "1.0.0"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83"
checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee"
dependencies = [
"serde",
"serde_core",
]
[[package]]
@ -3618,15 +3634,15 @@ dependencies = [
[[package]]
name = "serde_with"
version = "3.14.0"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
checksum = "c522100790450cf78eeac1507263d0a350d4d5b30df0c8e1fe051a10c22b376e"
dependencies = [
"base64 0.22.1",
"chrono",
"hex",
"indexmap 1.9.3",
"indexmap 2.11.1",
"indexmap 2.11.4",
"schemars 0.9.0",
"schemars 1.0.4",
"serde",
@ -3638,9 +3654,9 @@ dependencies = [
[[package]]
name = "serde_with_macros"
version = "3.14.0"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
checksum = "327ada00f7d64abaac1e55a6911e90cf665aa051b9a561c7006c157f4633135e"
dependencies = [
"darling",
"proc-macro2",
@ -3926,7 +3942,7 @@ dependencies = [
"cfg-expr",
"heck 0.5.0",
"pkg-config",
"toml 0.8.23",
"toml 0.8.2",
"version-compare",
]
@ -4075,7 +4091,7 @@ dependencies = [
"serde_json",
"tauri-utils",
"tauri-winres",
"toml 0.9.5",
"toml 0.9.7",
"walkdir",
]
@ -4133,7 +4149,7 @@ dependencies = [
"serde",
"serde_json",
"tauri-utils",
"toml 0.9.5",
"toml 0.9.7",
"walkdir",
]
@ -4173,7 +4189,7 @@ dependencies = [
"tauri-plugin",
"tauri-utils",
"thiserror 2.0.16",
"toml 0.9.5",
"toml 0.9.7",
"url",
]
@ -4306,7 +4322,7 @@ dependencies = [
"serde_with",
"swift-rs",
"thiserror 2.0.16",
"toml 0.9.5",
"toml 0.9.7",
"url",
"urlpattern",
"uuid",
@ -4320,7 +4336,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd21509dd1fa9bd355dc29894a6ff10635880732396aa38c0066c1e6c1ab8074"
dependencies = [
"embed-resource",
"toml 0.9.5",
"toml 0.9.7",
]
[[package]]
@ -4389,11 +4405,12 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.43"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83bde6f1ec10e72d583d91623c939f623002284ef622b87de38cfd546cbf2031"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde",
@ -4475,9 +4492,9 @@ dependencies = [
[[package]]
name = "tokio-rustls"
version = "0.26.2"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd"
dependencies = [
"rustls",
"tokio",
@ -4498,26 +4515,26 @@ dependencies = [
[[package]]
name = "toml"
version = "0.8.23"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
dependencies = [
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"toml_edit 0.22.27",
"toml_datetime 0.6.3",
"toml_edit 0.20.2",
]
[[package]]
name = "toml"
version = "0.9.5"
version = "0.9.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8"
checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0"
dependencies = [
"indexmap 2.11.1",
"serde",
"serde_spanned 1.0.0",
"toml_datetime 0.7.0",
"indexmap 2.11.4",
"serde_core",
"serde_spanned 1.0.2",
"toml_datetime 0.7.2",
"toml_parser",
"toml_writer",
"winnow 0.7.13",
@ -4525,20 +4542,20 @@ dependencies = [
[[package]]
name = "toml_datetime"
version = "0.6.11"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
dependencies = [
"serde",
]
[[package]]
name = "toml_datetime"
version = "0.7.0"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3"
checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1"
dependencies = [
"serde",
"serde_core",
]
[[package]]
@ -4547,49 +4564,50 @@ version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap 2.11.1",
"toml_datetime 0.6.11",
"indexmap 2.11.4",
"toml_datetime 0.6.3",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.20.7"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81"
checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
dependencies = [
"indexmap 2.11.1",
"toml_datetime 0.6.11",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap 2.11.1",
"indexmap 2.11.4",
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"toml_datetime 0.6.3",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.23.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b"
dependencies = [
"indexmap 2.11.4",
"toml_datetime 0.7.2",
"toml_parser",
"winnow 0.7.13",
]
[[package]]
name = "toml_parser"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10"
checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627"
dependencies = [
"winnow 0.7.13",
]
[[package]]
name = "toml_writer"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109"
[[package]]
name = "tower"
@ -4890,27 +4908,27 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]]
name = "wasi"
version = "0.14.5+wasi-0.2.4"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4494f6290a82f5fe584817a676a34b9d6763e8d9d18204009fb31dceca98fd4"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
"wasip2",
]
[[package]]
name = "wasip2"
version = "1.0.0+wasi-0.2.4"
version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03fa2761397e5bd52002cd7e73110c71af2109aca4e521a9f40473fe685b0a24"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.101"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e14915cadd45b529bb8d1f343c4ed0ac1de926144b746e2710f9cd05df6603b"
checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819"
dependencies = [
"cfg-if",
"once_cell",
@ -4921,9 +4939,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.101"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e28d1ba982ca7923fd01448d5c30c6864d0a14109560296a162f80f305fb93bb"
checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c"
dependencies = [
"bumpalo",
"log",
@ -4935,9 +4953,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.51"
version = "0.4.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ca85039a9b469b38336411d6d6ced91f3fc87109a2a27b0c197663f5144dffe"
checksum = "a0b221ff421256839509adbb55998214a70d829d3a28c69b4a6672e9d2a42f67"
dependencies = [
"cfg-if",
"js-sys",
@ -4948,9 +4966,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.101"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c3d463ae3eff775b0c45df9da45d68837702ac35af998361e2c84e7c5ec1b0d"
checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@ -4958,9 +4976,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.101"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bb4ce89b08211f923caf51d527662b75bdc9c9c7aab40f86dcb9fb85ac552aa"
checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32"
dependencies = [
"proc-macro2",
"quote",
@ -4971,9 +4989,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.101"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f143854a3b13752c6950862c906306adb27c7e839f7414cec8fea35beab624c1"
checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf"
dependencies = [
"unicode-ident",
]
@ -5053,9 +5071,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.78"
version = "0.3.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77e4b637749ff0d92b8fad63aa1f7cff3cbe125fd49c175cd6345e7272638b12"
checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc"
dependencies = [
"js-sys",
"wasm-bindgen",
@ -5701,9 +5719,9 @@ dependencies = [
[[package]]
name = "wit-bindgen"
version = "0.45.1"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "writeable"
@ -5841,7 +5859,7 @@ version = "5.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57e797a9c847ed3ccc5b6254e8bcce056494b375b511b3d6edcec0aeb4defaca"
dependencies = [
"proc-macro-crate 3.3.0",
"proc-macro-crate 3.4.0",
"proc-macro2",
"quote",
"syn 2.0.106",
@ -5963,7 +5981,7 @@ version = "5.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6643fd0b26a46d226bd90d3f07c1b5321fe9bb7f04673cb37ac6d6883885b68e"
dependencies = [
"proc-macro-crate 3.3.0",
"proc-macro-crate 3.4.0",
"proc-macro2",
"quote",
"syn 2.0.106",

View File

@ -12,15 +12,19 @@
{ "path": "$APPDATA/**" }
]
},
"core:default",
"fs:default",
"fs:allow-write-text-file",
"fs:allow-read-text-file",
"fs:allow-create",
"fs:allow-mkdir",
"fs:allow-exists",
"core:default",
"fs:allow-open",
"fs:allow-write",
"fs:allow-read",
"fs:allow-rename",
"fs:allow-mkdir",
"fs:allow-remove",
"fs:allow-stat",
"fs:allow-fstat",
"fs:allow-lstat",
"fs:allow-write-text-file",
"fs:read-meta",
"fs:scope-download-recursive",
{
"identifier": "fs:scope-appdata-recursive",
"allow": [
@ -44,6 +48,10 @@
"allow": [
{ "url": "https://**" }
]
},
{
"identifier": "opener:allow-open-path",
"allow": [{ "path": "$APPDATA" }, { "path": "$APPDATA/**" }]
}
]
}
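
The capability set now spells out the individual fs permissions the frontend relies on (open, write, read, rename, mkdir, remove, the stat variants, read-meta) and adds opener:allow-open-path for the $APPDATA scope. A hedged sketch of the kind of plugin calls these identifiers gate, using the Tauri v2 fs plugin; the file name 'store.json' and the error handling are assumptions.

import { mkdir, writeTextFile, BaseDirectory } from '@tauri-apps/plugin-fs';
import { appDataDir } from '@tauri-apps/api/path';

async function ensureAppDataStore(): Promise<void> {
  // fs:allow-mkdir — make sure the app-data directory exists.
  await mkdir(await appDataDir(), { recursive: true }).catch(() => {});
  // fs:allow-write-text-file — write inside the $APPDATA/** scope.
  await writeTextFile('store.json', JSON.stringify({ prompts: [] }), {
    baseDir: BaseDirectory.AppData,
  });
}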

View File

@ -19,7 +19,7 @@ pub fn log_json(level: &str, message: &str, data: Option<serde_json::Value>) {
let log_msg = LogMessage {
level: level.to_string(),
message: message.to_string(),
data,
data: data.filter(|v| !v.is_null()), // Filter out null values to avoid undefined in logs
timestamp: std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()

View File

@ -28,6 +28,7 @@ function App() {
const [isDarkMode, setIsDarkMode] = useState(false);
const [debugMessages, setDebugMessages] = useState<any[]>([]);
const [showDebugPanel, setShowDebugPanel] = useState(false); // Hidden in production
const [errorMessage, setErrorMessage] = useState<string | null>(null);
// Initialize logging system and connect to UI
useEffect(() => {
@ -348,6 +349,7 @@ function App() {
setPromptHistory,
setFileHistory: setFileHistoryWithLogging,
addToFileHistory,
setErrorMessage,
});
const addFiles = async (newPaths: string[]) => {
@ -466,6 +468,7 @@ function App() {
}
setIsGenerating(true);
setErrorMessage(null); // Clear any previous error messages
log.info(`🎨 Starting image generation via backend: "${promptText}"`);
// Add placeholder image with spinner to the files grid
@ -735,6 +738,41 @@ function App() {
toggleTheme={toggleTheme}
apiKey={apiKey}
/>
{/* Error Message Display */}
{errorMessage && (
<div className="mb-6 p-4 bg-red-50 dark:bg-red-900/30 border border-red-200 dark:border-red-700 rounded-xl">
<div className="flex justify-between items-start">
<div className="flex">
<div className="flex-shrink-0">
<svg className="h-5 w-5 text-red-400" viewBox="0 0 20 20" fill="currentColor">
<path fillRule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clipRule="evenodd" />
</svg>
</div>
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Generation Error
</h3>
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
<p>{errorMessage}</p>
</div>
</div>
</div>
<div className="ml-auto pl-3">
<button
type="button"
className="inline-flex bg-red-50 dark:bg-red-900/30 rounded-md p-1.5 text-red-500 hover:bg-red-100 dark:hover:bg-red-900/50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-red-500"
onClick={() => setErrorMessage(null)}
>
<span className="sr-only">Dismiss</span>
<svg className="h-5 w-5" viewBox="0 0 20 20" fill="currentColor">
<path fillRule="evenodd" d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z" clipRule="evenodd" />
</svg>
</button>
</div>
</div>
</div>
)}
<PromptForm
prompt={prompt}
setPrompt={setPrompt}
@ -774,6 +812,8 @@ function App() {
openFileFromHistory={openFileFromHistory}
onFileHistoryCleanup={onFileHistoryCleanup}
onLightboxPromptSubmit={handleLightboxPromptSubmit}
errorMessage={errorMessage}
setErrorMessage={setErrorMessage}
/>
{/* Debug Panel */}

View File

@ -18,6 +18,8 @@ interface ImageGalleryProps {
historyIndex?: number;
navigateHistory?: (direction: 'up' | 'down') => void;
isGenerating?: boolean;
errorMessage?: string | null;
setErrorMessage?: (message: string | null) => void;
}
export default function ImageGallery({
@ -34,7 +36,9 @@ export default function ImageGallery({
promptHistory = [],
historyIndex = -1,
navigateHistory,
isGenerating = false
isGenerating = false,
errorMessage,
setErrorMessage
}: ImageGalleryProps) {
const [lightboxOpen, setLightboxOpen] = useState(false);
const [lightboxLoaded, setLightboxLoaded] = useState(false);
@ -462,9 +466,48 @@ export default function ImageGallery({
</button>
)}
{/* Lightbox Error Message */}
{lightboxLoaded && errorMessage && (
<div className="absolute bottom-40 left-1/2 transform -translate-x-1/2 w-[80vw] max-w-4xl z-[60]">
<div className="bg-red-900/95 backdrop-blur-sm rounded-xl p-4 shadow-2xl border border-red-500/50 ring-2 ring-red-500/30">
<div className="flex justify-between items-start">
<div className="flex">
<div className="flex-shrink-0">
<svg className="h-5 w-5 text-red-400" viewBox="0 0 20 20" fill="currentColor">
<path fillRule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clipRule="evenodd" />
</svg>
</div>
<div className="ml-3">
<h3 className="text-sm font-medium text-red-200">
Generation Error
</h3>
<div className="mt-2 text-sm text-red-100">
<p>{errorMessage}</p>
</div>
</div>
</div>
{setErrorMessage && (
<div className="ml-auto pl-3">
<button
type="button"
className="inline-flex bg-red-900/50 rounded-md p-1.5 text-red-400 hover:bg-red-800/50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-red-500"
onClick={() => setErrorMessage(null)}
>
<span className="sr-only">Dismiss</span>
<svg className="h-5 w-5" viewBox="0 0 20 20" fill="currentColor">
<path fillRule="evenodd" d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z" clipRule="evenodd" />
</svg>
</button>
</div>
)}
</div>
</div>
</div>
)}
{/* Lightbox Prompt Field */}
{lightboxLoaded && onLightboxPromptSubmit && (
<div className="absolute bottom-16 left-1/2 transform -translate-x-1/2 w-[80vw] max-w-4xl">
<div className="absolute bottom-16 left-1/2 transform -translate-x-1/2 w-[80vw] max-w-4xl z-40">
<div className="bg-black/80 backdrop-blur-sm rounded-xl p-4 shadow-2xl border border-white/20">
<div className="flex gap-3">
<div className="flex-1 relative">
@ -481,10 +524,12 @@ export default function ImageGallery({
if (e.ctrlKey) {
// Ctrl+Enter: Submit and keep prompt for iteration
e.preventDefault();
setErrorMessage?.(null); // Clear error message
onLightboxPromptSubmit(lightboxPrompt, images[safeIndex].path);
} else {
// Enter: Submit and clear prompt
e.preventDefault();
setErrorMessage?.(null); // Clear error message
onLightboxPromptSubmit(lightboxPrompt, images[safeIndex].path);
setLightboxPrompt('');
}
@ -500,8 +545,14 @@ export default function ImageGallery({
e.stopPropagation();
e.preventDefault();
navigateHistory('down');
} else if (e.key.startsWith('Arrow')) {
// Stop all arrow key propagation when textarea is focused (except the Ctrl+ variants above)
e.stopPropagation();
} else if (e.key === 'Delete' || e.key === 'Backspace') {
// Stop Delete and Backspace propagation when textarea is focused
// Users should be able to delete text normally without triggering image deletion
e.stopPropagation();
}
// Delete key: Let it bubble up to lightbox handler for image deletion
// Shift+Enter: Allow new line (default textarea behavior)
// All other keys: Normal textarea behavior without interference
}}
@ -541,6 +592,7 @@ export default function ImageGallery({
onClick={(e) => {
e.stopPropagation();
if (lightboxPrompt.trim() && !isGenerating) {
setErrorMessage?.(null); // Clear error message
onLightboxPromptSubmit(lightboxPrompt, images[safeIndex].path);
setLightboxPrompt('');
// Keep lightbox open to show generation progress
@ -581,14 +633,34 @@ export default function ImageGallery({
{/* Delete Confirmation Dialog */}
{showDeleteConfirm && (
<div className="absolute inset-0 bg-black/90 flex items-center justify-center z-50">
<div
className="absolute inset-0 bg-black/90 flex items-center justify-center z-50"
onKeyDown={(e) => {
if (e.key === 'Enter') {
e.stopPropagation();
e.preventDefault();
confirmDelete(rememberChoice);
setRememberChoice(false);
} else if (e.key === 'Escape') {
e.stopPropagation();
e.preventDefault();
setShowDeleteConfirm(false);
setRememberChoice(false);
}
}}
tabIndex={0}
autoFocus
>
<div className="bg-white dark:bg-gray-800 rounded-xl p-6 max-w-md mx-4 shadow-2xl">
<h3 className="text-lg font-semibold text-gray-900 dark:text-white mb-2">
Delete Image?
</h3>
<p className="text-gray-600 dark:text-gray-300 mb-4">
<p className="text-gray-600 dark:text-gray-300 mb-2">
Are you sure you want to delete "{images[Math.max(0, Math.min(currentIndex, images.length - 1))]?.path.split(/[/\\]/).pop()}"?
</p>
<p className="text-sm text-gray-500 dark:text-gray-400 mb-4">
Press <kbd className="px-1.5 py-0.5 bg-gray-100 dark:bg-gray-700 rounded text-xs">Enter</kbd> to confirm or <kbd className="px-1.5 py-0.5 bg-gray-100 dark:bg-gray-700 rounded text-xs">Escape</kbd> to cancel
</p>
<div className="space-y-3">
<label className="flex items-center gap-2 text-sm text-gray-600 dark:text-gray-300">
<input
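
The lightbox prompt textarea now swallows arrow, Delete and Backspace keys so typing never triggers gallery navigation or image deletion, and the delete-confirmation overlay accepts Enter/Escape directly. A minimal sketch of that propagation rule, with a hypothetical handler name:

import type { KeyboardEvent } from 'react';

// Hypothetical handler name; mirrors the rule added to the textarea above.
function onPromptKeyDown(e: KeyboardEvent<HTMLTextAreaElement>): void {
  if (e.key.startsWith('Arrow') || e.key === 'Delete' || e.key === 'Backspace') {
    // Editing keys stay local: do not let gallery shortcuts see them.
    e.stopPropagation();
  }
}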

View File

@ -5,7 +5,6 @@ import ImageGallery from './ImageGallery';
import { useDropZone } from '../hooks/useDropZone';
import TemplateManager from './TemplateManager';
import { tauriApi } from '../lib/tauriApi';
import { saveToStore } from '../lib/init';
import log from '../lib/log';
import { Eraser, Sparkles, Crop, Palette, Package, FolderOpen, Plus, History, ChevronUp, ChevronDown } from 'lucide-react';
@ -70,6 +69,8 @@ interface PromptFormProps {
openFileFromHistory: (filePath: string) => Promise<void>;
onFileHistoryCleanup: (validFiles: string[]) => Promise<void>;
onLightboxPromptSubmit: (prompt: string, imagePath: string) => Promise<void>;
errorMessage?: string | null;
setErrorMessage?: (message: string | null) => void;
}
const PromptForm: React.FC<PromptFormProps> = ({
@ -111,6 +112,8 @@ const PromptForm: React.FC<PromptFormProps> = ({
openFileFromHistory,
onFileHistoryCleanup,
onLightboxPromptSubmit,
errorMessage,
setErrorMessage,
}) => {
const selectedCount = getSelectedImages().length;
const { ref: dropZoneRef, dragIn } = useDropZone({ onDrop: addFiles });
@ -293,9 +296,9 @@ const PromptForm: React.FC<PromptFormProps> = ({
disabled={isGenerating || !hasSelectedImages}
className={`text-lg px-2 py-2 rounded transition-colors duration-200 ${
!hasAnyImages
? 'bg-slate-50 text-slate-400 cursor-not-allowed'
? 'bg-slate-50 dark:bg-slate-800 text-slate-400 dark:text-slate-500 cursor-not-allowed'
: !hasSelectedImages
? 'bg-orange-50 text-orange-600 cursor-not-allowed'
? 'bg-orange-50 dark:bg-orange-900/30 text-orange-600 dark:text-orange-400 cursor-not-allowed'
: 'bg-blue-100 hover:bg-blue-200 dark:bg-blue-900 dark:hover:bg-blue-800 text-blue-700 dark:text-blue-300'
} ${isGenerating ? 'opacity-50 cursor-not-allowed' : ''}`}
title={!hasAnyImages ? 'Add an image first' : !hasSelectedImages ? 'Select an image first' : action.name}
@ -426,6 +429,8 @@ const PromptForm: React.FC<PromptFormProps> = ({
historyIndex={historyIndex}
navigateHistory={navigateHistory}
isGenerating={isGenerating}
errorMessage={errorMessage}
setErrorMessage={setErrorMessage}
/>
</div>
</div>
@ -490,6 +495,8 @@ const PromptForm: React.FC<PromptFormProps> = ({
// Double click picks the image and closes modal
openFileFromHistory(imagePath);
}}
errorMessage={errorMessage}
setErrorMessage={setErrorMessage}
/>
)}
</div>

View File

@ -16,6 +16,7 @@ interface TauriListenersProps extends InitCallbacks {
setPromptHistory: (history: string[]) => void;
setFileHistory: (fileHistory: string[]) => void;
addToFileHistory: (filePath: string) => Promise<void>;
setErrorMessage?: (message: string | null) => void;
}
export function useTauriListeners({
@ -33,7 +34,8 @@ export function useTauriListeners({
setCurrentIndex,
setPromptHistory,
setFileHistory,
addToFileHistory
addToFileHistory,
setErrorMessage
}: TauriListenersProps) {
useEffect(() => {
let unlistenConfig: (() => void) | undefined;
@ -131,6 +133,11 @@ export function useTauriListeners({
log.error('❌ Generation failed', errorData);
setIsGenerating(false);
setFiles(prev => prev.filter(file => !file.path.startsWith('generating_')));
// Display error message to user
if (setErrorMessage && errorData?.error) {
setErrorMessage(errorData.error);
}
}),
tauriApi.listen(TauriEvent.GENERATION_COMPLETE, (event: any) => {
const completionData = event.payload;
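
The failure branch now clears the spinner, removes the "generating_" placeholder tile, and pushes the backend error text into the new errorMessage state. A self-contained sketch of that handling with hypothetical names; the payload shape is inferred from the hunk above.

type FailurePayload = { error?: string };

// Hypothetical standalone version of the failure handling shown above.
function handleGenerationFailed(
  payload: FailurePayload | undefined,
  setIsGenerating: (generating: boolean) => void,
  setFiles: (update: (prev: { path: string }[]) => { path: string }[]) => void,
  setErrorMessage?: (message: string | null) => void,
): void {
  setIsGenerating(false);
  // Drop the placeholder tile that was added when generation started.
  setFiles(prev => prev.filter(file => !file.path.startsWith('generating_')));
  if (setErrorMessage && payload?.error) {
    setErrorMessage(payload.error);
  }
}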

View File

@ -92,12 +92,21 @@ export async function loadStore(
}
log.info('📂 Attempting to load prompts from store...');
const configDir = await tauriApi.path.appDataDir();
log.debug(`📁 Data directory: ${configDir}`);
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
log.debug(`📄 Store path resolved to: ${storePath}`);
const content = await tauriApi.fs.readTextFile(storePath);
// Try BaseDirectory approach first
const BaseDir = tauriApi.fs.BaseDirectory;
let content: string;
if (BaseDir && BaseDir.AppData !== undefined) {
log.debug(`📄 Reading store using BaseDirectory.AppData`);
content = await tauriApi.fs.readTextFile(STORE_FILE_NAME, { baseDir: BaseDir.AppData });
} else {
// Fallback to absolute path
const configDir = await tauriApi.path.appDataDir();
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
log.debug(`📄 Reading store using absolute path: ${storePath}`);
content = await tauriApi.fs.readTextFile(storePath);
}
if (content) {
const data = JSON.parse(content);
@ -140,19 +149,36 @@ export async function loadStore(
// Create initial empty store
try {
const initialPrompts: PromptTemplate[] = [];
const configDir = await tauriApi.path.appDataDir();
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
log.debug(`📁 Ensuring directory exists: ${configDir}`);
// Tauri should create the APPDATA directory automatically, but let's verify
log.debug(`📁 APPDATA directory should exist: ${configDir}`);
// Use BaseDirectory.AppData approach for better compatibility
const BaseDir = tauriApi.fs.BaseDirectory;
log.debug(`📁 BaseDirectory available:`, BaseDir ? Object.keys(BaseDir) : 'none');
const initialData = JSON.stringify({ prompts: initialPrompts }, null, 2);
log.debug(`💾 Writing initial store data to: ${storePath}`);
await tauriApi.fs.writeTextFile(storePath, initialData);
log.info(`✅ Initial store created successfully at ${storePath}`);
// Try to write using BaseDirectory first (most reliable)
if (BaseDir && BaseDir.AppData !== undefined) {
log.debug(`💾 Writing store using BaseDirectory.AppData (value: ${BaseDir.AppData})`);
await tauriApi.fs.writeTextFile(STORE_FILE_NAME, initialData, { baseDir: BaseDir.AppData });
log.info(`✅ Initial store created successfully using BaseDirectory.AppData`);
} else {
// Fallback to absolute path approach with directory creation
const configDir = await tauriApi.path.appDataDir();
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
log.debug(`💾 Writing store using absolute path: ${storePath}`);
// Try to create the directory first using mkdir
try {
await tauriApi.fs.createDir(configDir, { recursive: true });
log.debug(`✅ Directory created: ${configDir}`);
} catch (dirError) {
log.debug(`📁 Directory creation failed or already exists: ${(dirError as Error).message}`);
}
// Now try to write the file
await tauriApi.fs.writeTextFile(storePath, initialData);
log.info(`✅ Initial store created successfully at ${storePath}`);
}
setPrompts(initialPrompts);
return initialPrompts;
@ -298,15 +324,21 @@ export async function saveToStore(updates: {
try {
log.debug('💾 Starting save prompts process...');
const dataDir = await tauriApi.path.appDataDir();
log.debug(`📁 Got data dir: ${dataDir}`);
const storePath = await tauriApi.path.join(dataDir, STORE_FILE_NAME);
log.debug(`📄 Store path: ${storePath}`);
// Try BaseDirectory approach first
const BaseDir = tauriApi.fs.BaseDirectory;
// Load existing data first to merge
let existingData = { prompts: [], history: [], fileHistory: [] };
try {
const existingContent = await tauriApi.fs.readTextFile(storePath);
let existingContent: string;
if (BaseDir && BaseDir.AppData !== undefined) {
existingContent = await tauriApi.fs.readTextFile(STORE_FILE_NAME, { baseDir: BaseDir.AppData });
} else {
const dataDir = await tauriApi.path.appDataDir();
const storePath = await tauriApi.path.join(dataDir, STORE_FILE_NAME);
existingContent = await tauriApi.fs.readTextFile(storePath);
}
if (existingContent) {
existingData = JSON.parse(existingContent);
}
@ -329,7 +361,23 @@ export async function saveToStore(updates: {
dataLength: dataToSave.length
});
await tauriApi.fs.writeTextFile(storePath, dataToSave);
// Write using the same approach as loading
if (BaseDir && BaseDir.AppData !== undefined) {
await tauriApi.fs.writeTextFile(STORE_FILE_NAME, dataToSave, { baseDir: BaseDir.AppData });
} else {
const dataDir = await tauriApi.path.appDataDir();
const storePath = await tauriApi.path.join(dataDir, STORE_FILE_NAME);
// Ensure directory exists before writing
try {
await tauriApi.fs.createDir(dataDir, { recursive: true });
log.debug(`✅ Directory ensured: ${dataDir}`);
} catch (dirError) {
log.debug(`📁 Directory creation failed or already exists: ${(dirError as Error).message}`);
}
await tauriApi.fs.writeTextFile(storePath, dataToSave);
}
log.info(`✅ Store saved with ${mergedData.prompts.length} prompts, ${mergedData.history.length} history, ${mergedData.fileHistory.length} files`);
} catch (error) {
log.error('Failed to save prompts', {
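The BaseDirectory-vs-absolute-path fallback above is written out three times (loadStore, the initial-store creation, and saveToStore). A minimal sketch of how it could be shared — assuming the same tauriApi wrapper and STORE_FILE_NAME constant from this file; the helper names readStoreText/writeStoreText are illustrative, not part of this commit:

// Hypothetical helpers: centralize the AppData fallback so the store functions
// only deal with parsed JSON.
async function readStoreText(): Promise<string> {
  const BaseDir = tauriApi.fs.BaseDirectory;
  if (BaseDir && BaseDir.AppData !== undefined) {
    return tauriApi.fs.readTextFile(STORE_FILE_NAME, { baseDir: BaseDir.AppData });
  }
  const dir = await tauriApi.path.appDataDir();
  const storePath = await tauriApi.path.join(dir, STORE_FILE_NAME);
  return tauriApi.fs.readTextFile(storePath);
}

async function writeStoreText(data: string): Promise<void> {
  const BaseDir = tauriApi.fs.BaseDirectory;
  if (BaseDir && BaseDir.AppData !== undefined) {
    await tauriApi.fs.writeTextFile(STORE_FILE_NAME, data, { baseDir: BaseDir.AppData });
    return;
  }
  const dir = await tauriApi.path.appDataDir();
  const storePath = await tauriApi.path.join(dir, STORE_FILE_NAME);
  try {
    await tauriApi.fs.createDir(dir, { recursive: true }); // best effort
  } catch {
    // directory most likely exists already
  }
  await tauriApi.fs.writeTextFile(storePath, data);
}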

View File

@ -3,7 +3,7 @@ import {
TauriEvent
} from '../constants';
import { PromptTemplate } from '../types';
// import { BaseDirectory } from '@tauri-apps/plugin-fs'
// Dynamically import Tauri APIs
let invoke: any;
let open: any;
@ -12,6 +12,7 @@ let readFile: any;
let writeFile: any;
let readTextFile: any;
let writeTextFile: any;
let mkdir: any;
let BaseDirectory: any;
let listen: any;
let getCurrentWindow: any;
@ -59,6 +60,12 @@ const apiInitializationPromise = (async () => {
// Test if we can actually use the APIs (this will throw if not in Tauri)
await windowApi.getCurrentWindow();
// Debug: Check what's available in pathApi
console.log('🔍 pathApi keys:', Object.keys(pathApi));
console.log('🔍 pathApi.BaseDirectory:', pathApi.BaseDirectory);
console.log('🔍 fsApi keys:', Object.keys(fsApi));
console.log('🔍 fsApi.BaseDirectory:', fsApi.BaseDirectory);
// Assign all APIs
getCurrentWindow = windowApi.getCurrentWindow;
getCurrentWebview = webviewApi.getCurrentWebview;
@ -70,7 +77,8 @@ const apiInitializationPromise = (async () => {
writeFile = fsApi.writeFile;
readTextFile = fsApi.readTextFile;
writeTextFile = fsApi.writeTextFile;
BaseDirectory = fsApi.BaseDirectory;
mkdir = fsApi.mkdir;
BaseDirectory = pathApi.BaseDirectory || fsApi.BaseDirectory;
fetch = httpApi.fetch;
appConfigDir = pathApi.appConfigDir;
appDataDir = pathApi.appDataDir;
@ -155,7 +163,13 @@ export const tauriApi = {
return writeTextFile(...args);
}
},
BaseDirectory: () => BaseDirectory,
createDir: async (path: string, options?: { baseDir?: any; recursive?: boolean }) => {
await ensureTauriApi();
if (mkdir) {
return mkdir(path, options);
}
// mkdir may be unavailable outside a real Tauri window; callers treat createDir as best-effort
},
// Exposed via a getter so callers read the value assigned after the async imports, not undefined
get BaseDirectory() { return BaseDirectory; },
},
dialog: {
open: async (...args: Parameters<typeof open>) => {
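Because the plugin modules are imported asynchronously, any value copied off them at module-evaluation time is still undefined; callers should await a wrapped call first so initialization has finished before reading BaseDirectory. A small caller sketch under that assumption (ensureStoreDir is a hypothetical name, not in this file):

// Awaiting any wrapped API (which runs ensureTauriApi) guarantees the dynamic
// imports completed before BaseDirectory is consulted.
async function ensureStoreDir(): Promise<string> {
  const dir = await tauriApi.path.appDataDir(); // forces initialization to finish
  const BaseDir = tauriApi.fs.BaseDirectory;    // populated only after init
  console.debug('BaseDirectory keys:', BaseDir ? Object.keys(BaseDir) : 'none');
  try {
    await tauriApi.fs.createDir(dir, { recursive: true });
  } catch {
    // directory may already exist; createDir is best-effort
  }
  return dir;
}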

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -253,11 +253,13 @@ async function launchGuiAndGetPrompt(argv: any): Promise<string | null> {
} else if (message.type === 'generate_request') {
logger.info('📨 Received generation request from GUI');
// Extract variables outside try block for error handling
const genPrompt = message.prompt;
const genFiles = message.files || [];
const genDst = message.dst;
// Process the generation request using our existing image generation logic
try {
const genPrompt = message.prompt;
const genFiles = message.files || [];
const genDst = message.dst;
const finalDstPath = generateUniqueFilename(genDst, genFiles);
logger.info(`📝 Determined destination path for generated image: ${finalDstPath}`);
@ -316,12 +318,28 @@ async function launchGuiAndGetPrompt(argv: any): Promise<string | null> {
tauriProcess.stdin?.write(JSON.stringify(errorResponse) + '\n');
}
} catch (error) {
logger.error('❌ Generation error:', error.message);
const errorMessage = error instanceof Error ? error.message : String(error);
const errorStack = error instanceof Error ? error.stack : undefined;
console.log('🔴 Generation error:', error, errorMessage);
// Send error back to GUI
logger.error('❌ Generation error:', {
message: errorMessage,
stack: errorStack,
prompt: genPrompt?.substring(0, 100) + '...',
fileCount: genFiles?.length || 0,
files: genFiles?.map(f => path.basename(f))
});
// Send detailed error back to GUI
const errorResponse = {
cmd: 'generation_error',
error: error.message
error: errorMessage,
details: {
prompt: genPrompt?.substring(0, 100) + '...',
fileCount: genFiles?.length || 0,
timestamp: new Date().toISOString()
}
};
tauriProcess.stdin?.write(JSON.stringify(errorResponse) + '\n');
}
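The error payload sent back over the GUI process's stdin is now structured. A type sketch of that message as implied by the code above (the interface name GenerationErrorMessage is illustrative):

// Shape of the JSON line written to tauriProcess.stdin on a failed generation.
interface GenerationErrorMessage {
  cmd: 'generation_error';
  error: string;            // human-readable error message
  details: {
    prompt: string;         // first 100 characters of the prompt, suffixed with '...'
    fileCount: number;      // number of input files attached to the request
    timestamp: string;      // ISO-8601 time the error was reported
  };
}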

View File

@ -33,20 +33,90 @@ export const createImage = async (prompt: string, options: IKBotOptions): Promis
const model = ai.getGenerativeModel({ model: options.model || 'gemini-2.5-flash-image-preview' });
const result = await model.generateContent(prompt);
const response = result.response;
const parts = response.candidates[0].content.parts;
for (const part of parts) {
if ('inlineData' in part) {
const inlineData = part.inlineData;
if (inlineData) {
return Buffer.from(inlineData.data, "base64");
try {
const result = await model.generateContent(prompt);
const response = result.response;
logger.debug('Google AI API response structure:', {
hasResponse: !!response,
hasCandidates: !!response?.candidates,
candidatesLength: response?.candidates?.length,
fullResponse: JSON.stringify(response, null, 2)
});
if (!response || !response.candidates || response.candidates.length === 0) {
logger.error('Invalid API response structure - no candidates found', {
response: JSON.stringify(response, null, 2)
});
throw new Error('No candidates returned from Google AI API. The content may have been blocked due to safety filters or other restrictions.');
}
const candidate = response.candidates[0];
// Check for safety filter rejections first
if (candidate.finishReason && candidate.finishReason !== 'STOP') {
const finishReasonMessages: Record<string, string> = {
'IMAGE_SAFETY': 'Content blocked by image safety filters. The image or prompt contains content that violates Google AI safety policies.',
'SAFETY': 'Content blocked by safety filters. The prompt contains content that violates Google AI safety policies.',
'RECITATION': 'Content blocked due to recitation concerns. The generated content may be too similar to existing copyrighted material.',
'OTHER': 'Content generation stopped for other safety or policy reasons.'
};
const message = finishReasonMessages[candidate.finishReason] ||
`Content generation stopped. Reason: ${candidate.finishReason}`;
logger.error('Google AI blocked content due to safety filters:', {
finishReason: candidate.finishReason,
rejectionMessage: message,
candidate: JSON.stringify(candidate, null, 2)
});
throw new Error(`Request blocked by Google AI: ${message}`);
}
if (!candidate.content || !candidate.content.parts) {
logger.error('Invalid candidate structure - no content parts found', {
candidate: JSON.stringify(candidate, null, 2)
});
throw new Error('Invalid response structure from Google AI API - no content parts found.');
}
const parts = candidate.content.parts;
for (const part of parts) {
if ('inlineData' in part) {
const inlineData = part.inlineData;
if (inlineData) {
return Buffer.from(inlineData.data, "base64");
}
} else if ('text' in part && part.text) {
// Check if this is a rejection message
const text = part.text.toLowerCase();
if (text.includes('cannot fulfill') || text.includes('not able to create') ||
text.includes('unable to generate') || text.includes('cannot generate') ||
text.includes('cannot create') || text.includes('not appropriate')) {
logger.error('Google AI rejected the request:', {
rejectionMessage: part.text,
finishReason: candidate.finishReason
});
throw new Error(`Request rejected by Google AI: ${part.text}`);
}
}
}
logger.warn('No image data found in API response parts', {
partsCount: parts.length,
parts: JSON.stringify(parts, null, 2),
finishReason: candidate.finishReason
});
throw new Error('No image data found in API response. The model may not have generated an image.');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('Google AI API error in createImage:', {
error: errorMessage,
stack: error instanceof Error ? error.stack : undefined,
prompt: prompt.substring(0, 100) + '...'
});
throw error; // Re-throw to let the caller handle it
}
return null;
}
export const editImage = async (prompt: string, imagePaths: string[], options: IKBotOptions): Promise<Buffer | null> => {
@ -57,34 +127,115 @@ export const editImage = async (prompt: string, imagePaths: string[], options: I
const model = ai.getGenerativeModel({ model: options.model || 'gemini-2.5-flash-image-preview' });
const imageParts: Part[] = imagePaths.map(imagePath => {
const imageData = fs.readFileSync(imagePath);
const base64Image = imageData.toString("base64");
const mimeType = lookup(imagePath) || 'image/png';
return {
inlineData: {
mimeType,
data: base64Image,
},
};
});
try {
const imageParts: Part[] = imagePaths.map(imagePath => {
const imageData = fs.readFileSync(imagePath);
const base64Image = imageData.toString("base64");
const mimeType = lookup(imagePath) || 'image/png';
return {
inlineData: {
mimeType,
data: base64Image,
},
};
});
const textPart: Part = { text: prompt };
const textPart: Part = { text: prompt };
const promptParts = [...imageParts, textPart];
const promptParts = [...imageParts, textPart];
const result = await model.generateContent(promptParts);
const response = result.response;
logger.debug('Google AI API response structure (editImage):', {
hasResponse: !!response,
hasCandidates: !!response?.candidates,
candidatesLength: response?.candidates?.length,
fullResponse: JSON.stringify(response, null, 2)
});
const result = await model.generateContent(promptParts);
const response = result.response;
const parts = response.candidates[0].content.parts;
for (const part of parts) {
if ('inlineData' in part) {
const inlineData = part.inlineData;
if (inlineData) {
return Buffer.from(inlineData.data, "base64");
if (!response || !response.candidates || response.candidates.length === 0) {
logger.error('Invalid API response structure - no candidates found (editImage)', {
response: JSON.stringify(response, null, 2),
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length
});
throw new Error('No candidates returned from Google AI API. The content may have been blocked due to safety filters or other restrictions.');
}
const candidate = response.candidates[0];
// Check for safety filter rejections first
if (candidate.finishReason && candidate.finishReason !== 'STOP') {
const finishReasonMessages: Record<string, string> = {
'IMAGE_SAFETY': 'Content blocked by image safety filters. The image or prompt contains content that violates Google AI safety policies.',
'SAFETY': 'Content blocked by safety filters. The prompt contains content that violates Google AI safety policies.',
'RECITATION': 'Content blocked due to recitation concerns. The generated content may be too similar to existing copyrighted material.',
'OTHER': 'Content generation stopped for other safety or policy reasons.'
};
const message = finishReasonMessages[candidate.finishReason] ||
`Content generation stopped. Reason: ${candidate.finishReason}`;
logger.error('Google AI blocked image edit due to safety filters:', {
finishReason: candidate.finishReason,
rejectionMessage: message,
candidate: JSON.stringify(candidate, null, 2),
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length
});
throw new Error(`Request blocked by Google AI: ${message}`);
}
if (!candidate.content || !candidate.content.parts) {
logger.error('Invalid candidate structure - no content parts found (editImage)', {
candidate: JSON.stringify(candidate, null, 2),
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length
});
throw new Error('Invalid response structure from Google AI API - no content parts found.');
}
const parts = candidate.content.parts;
for (const part of parts) {
if ('inlineData' in part) {
const inlineData = part.inlineData;
if (inlineData) {
return Buffer.from(inlineData.data, "base64");
}
} else if ('text' in part && part.text) {
// Check if this is a rejection message
const text = part.text.toLowerCase();
if (text.includes('cannot fulfill') || text.includes('not able to create') ||
text.includes('unable to generate') || text.includes('cannot generate') ||
text.includes('cannot create') || text.includes('not appropriate')) {
logger.error('Google AI rejected the image edit request:', {
rejectionMessage: part.text,
finishReason: candidate.finishReason,
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length
});
throw new Error(`Request rejected by Google AI: ${part.text}`);
}
}
}
}
return null;
logger.warn('No image data found in API response parts (editImage)', {
partsCount: parts.length,
parts: JSON.stringify(parts, null, 2),
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length,
finishReason: candidate.finishReason
});
throw new Error('No image data found in API response. The model may not have generated an image.');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('Google AI API error in editImage:', {
error: errorMessage,
stack: error instanceof Error ? error.stack : undefined,
prompt: prompt.substring(0, 100) + '...',
imageCount: imagePaths.length,
imagePaths: imagePaths.map(p => p.split(/[/\\]/).pop())
});
throw error; // Re-throw to let the caller handle it
}
}
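With this change both createImage and editImage throw descriptive errors on safety-filter blocks and malformed responses instead of quietly returning null. A minimal caller sketch under that assumption (generateToFile and its parameters are illustrative, and options must be a valid IKBotOptions for the project):

import fs from 'fs';

async function generateToFile(prompt: string, dst: string, options: IKBotOptions): Promise<void> {
  try {
    const buffer = await createImage(prompt, options);
    if (buffer) {
      fs.writeFileSync(dst, buffer);
    }
  } catch (error) {
    // Safety-filter rejections surface here as readable messages,
    // e.g. "Request blocked by Google AI: Content blocked by image safety filters..."
    const message = error instanceof Error ? error.message : String(error);
    console.error('Image generation failed:', message);
    throw error;
  }
}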

View File

@ -1,7 +1,6 @@
export enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
MODEL_FREE_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_FREE_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_FREE_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_FREE_DEEPSEEK_DEEPSEEK_CHAT_V3_1_FREE = "deepseek/deepseek-chat-v3.1:free",
MODEL_FREE_OPENAI_GPT_OSS_120B_FREE = "openai/gpt-oss-120b:free",
MODEL_FREE_OPENAI_GPT_OSS_20B_FREE = "openai/gpt-oss-20b:free",

View File

@ -1,4 +1,5 @@
export enum E_OPENROUTER_MODEL {
MODEL_X_AI_GROK_4_FAST_FREE = "x-ai/grok-4-fast:free",
MODEL_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B = "alibaba/tongyi-deepresearch-30b-a3b",
MODEL_QWEN_QWEN3_CODER_FLASH = "qwen/qwen3-coder-flash",
MODEL_QWEN_QWEN3_CODER_PLUS = "qwen/qwen3-coder-plus",
@ -11,8 +12,6 @@ export enum E_OPENROUTER_MODEL {
MODEL_QWEN_QWEN_PLUS_2025_07_28_THINKING = "qwen/qwen-plus-2025-07-28:thinking",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2_FREE = "nvidia/nemotron-nano-9b-v2:free",
MODEL_NVIDIA_NEMOTRON_NANO_9B_V2 = "nvidia/nemotron-nano-9b-v2",
MODEL_OPENROUTER_SONOMA_DUSK_ALPHA = "openrouter/sonoma-dusk-alpha",
MODEL_OPENROUTER_SONOMA_SKY_ALPHA = "openrouter/sonoma-sky-alpha",
MODEL_QWEN_QWEN3_MAX = "qwen/qwen3-max",
MODEL_MOONSHOTAI_KIMI_K2_0905 = "moonshotai/kimi-k2-0905",
MODEL_BYTEDANCE_SEED_OSS_36B_INSTRUCT = "bytedance/seed-oss-36b-instruct",

View File

@ -55,6 +55,7 @@ export interface IKBotOptions {
anthropic/claude-opus-4 | paid
anthropic/claude-opus-4.1 | paid
anthropic/claude-sonnet-4 | paid
arcee-ai/afm-4.5b | paid
arcee-ai/coder-large | paid
arcee-ai/maestro-reasoning | paid
arcee-ai/spotlight | paid
@ -96,9 +97,7 @@ export interface IKBotOptions {
deepseek/deepseek-r1-distill-llama-70b:free | free
deepseek/deepseek-r1-distill-llama-8b | paid
deepseek/deepseek-r1-distill-qwen-14b | paid
deepseek/deepseek-r1-distill-qwen-14b:free | free
deepseek/deepseek-r1-distill-qwen-32b | paid
cognitivecomputations/dolphin-mixtral-8x22b | paid
cognitivecomputations/dolphin3.0-mistral-24b | paid
cognitivecomputations/dolphin3.0-mistral-24b:free | free
cognitivecomputations/dolphin3.0-r1-mistral-24b | paid
@ -170,7 +169,6 @@ export interface IKBotOptions {
microsoft/phi-3-medium-128k-instruct | paid
microsoft/phi-3-mini-128k-instruct | paid
microsoft/phi-3.5-mini-128k-instruct | paid
sophosympatheia/midnight-rose-70b | paid
minimax/minimax-m1 | paid
minimax/minimax-01 | paid
mistralai/mistral-large | paid
@ -230,7 +228,6 @@ export interface IKBotOptions {
nousresearch/hermes-2-pro-llama-3-8b | paid
nvidia/llama-3.1-nemotron-70b-instruct | paid
nvidia/llama-3.1-nemotron-ultra-253b-v1 | paid
nvidia/llama-3.1-nemotron-ultra-253b-v1:free | free
nvidia/nemotron-nano-9b-v2 | paid
nvidia/nemotron-nano-9b-v2:free | free
openai/chatgpt-4o-latest | paid
@ -275,6 +272,7 @@ export interface IKBotOptions {
openai/o3-pro | paid
openai/o4-mini | paid
openai/o4-mini-high | paid
opengvlab/internvl3-78b | paid
perplexity/r1-1776 | paid
perplexity/sonar | paid
perplexity/sonar-deep-research | paid
@ -310,6 +308,8 @@ export interface IKBotOptions {
qwen/qwen3-coder-30b-a3b-instruct | paid
qwen/qwen3-coder | paid
qwen/qwen3-coder:free | free
qwen/qwen3-coder-flash | paid
qwen/qwen3-coder-plus | paid
qwen/qwen3-max | paid
qwen/qwen3-next-80b-a3b-instruct | paid
qwen/qwen3-next-80b-a3b-thinking | paid
@ -321,7 +321,6 @@ export interface IKBotOptions {
qwen/qwen-2.5-7b-instruct | paid
qwen/qwen-2.5-coder-32b-instruct | paid
qwen/qwen-2.5-coder-32b-instruct:free | free
rekaai/reka-flash-3:free | free
undi95/remm-slerp-l2-13b | paid
sao10k/l3-lunaris-8b | paid
sao10k/l3-euryale-70b | paid
@ -329,8 +328,6 @@ export interface IKBotOptions {
sao10k/l3.3-euryale-70b | paid
shisa-ai/shisa-v2-llama3.3-70b | paid
shisa-ai/shisa-v2-llama3.3-70b:free | free
openrouter/sonoma-dusk-alpha | paid
openrouter/sonoma-sky-alpha | paid
raifle/sorcererlm-8x22b | paid
stepfun-ai/step3 | paid
switchpoint/router | paid
@ -341,21 +338,20 @@ export interface IKBotOptions {
thedrummer/rocinante-12b | paid
thedrummer/skyfall-36b-v2 | paid
thedrummer/unslopnemo-12b | paid
thudm/glm-4-32b | paid
thudm/glm-4.1v-9b-thinking | paid
thudm/glm-z1-32b | paid
tngtech/deepseek-r1t-chimera | paid
tngtech/deepseek-r1t-chimera:free | free
tngtech/deepseek-r1t2-chimera:free | free
alibaba/tongyi-deepresearch-30b-a3b | paid
cognitivecomputations/dolphin-mistral-24b-venice-edition:free | free
microsoft/wizardlm-2-8x22b | paid
x-ai/grok-2-1212 | paid
x-ai/grok-2-vision-1212 | paid
x-ai/grok-3 | paid
x-ai/grok-3-beta | paid
x-ai/grok-3-mini | paid
x-ai/grok-3-mini-beta | paid
x-ai/grok-4 | paid
x-ai/grok-4-fast:free | free
x-ai/grok-code-fast-1 | paid
z-ai/glm-4-32b | paid
z-ai/glm-4.5 | paid