tauri cleanup

lovebird 2025-09-20 09:17:52 +02:00
parent bbac3d6874
commit 005a909534
13 changed files with 648 additions and 227 deletions

File diff suppressed because one or more lines are too long

View File

@@ -30,7 +30,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE = "arliai/qwq-32b-arliai-rpr-v1:free",
MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE = "agentica-org/deepcoder-14b-preview:free",
MODEL_FREE_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE = "moonshotai/kimi-vl-a3b-thinking:free",
MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE = "meta-llama/llama-4-scout:free",
MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE = "qwen/qwen2.5-vl-32b-instruct:free",
@@ -38,7 +37,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE = "mistralai/mistral-small-3.1-24b-instruct:free",
MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE = "google/gemma-3-4b-it:free",
MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE = "google/gemma-3-12b-it:free",
MODEL_FREE_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free",
MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free",
MODEL_FREE_QWEN_QWQ_32B_FREE = "qwen/qwq-32b:free",
MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE = "nousresearch/deephermes-3-llama-3-8b-preview:free",
@@ -46,7 +44,6 @@ export declare enum E_OPENROUTER_MODEL_FREE {
MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE = "cognitivecomputations/dolphin3.0-mistral-24b:free",
MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE = "qwen/qwen2.5-vl-72b-instruct:free",
MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE = "deepseek/deepseek-r1-distill-llama-70b:free",
MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE = "deepseek/deepseek-r1:free",
MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free",

View File

@@ -31,7 +31,6 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_ARLIAI_QWQ_32B_ARLIAI_RPR_V1_FREE"] = "arliai/qwq-32b-arliai-rpr-v1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_AGENTICA_ORG_DEEPCODER_14B_PREVIEW_FREE"] = "agentica-org/deepcoder-14b-preview:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MOONSHOTAI_KIMI_VL_A3B_THINKING_FREE"] = "moonshotai/kimi-vl-a3b-thinking:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE"] = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_MAVERICK_FREE"] = "meta-llama/llama-4-maverick:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_META_LLAMA_LLAMA_4_SCOUT_FREE"] = "meta-llama/llama-4-scout:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_32B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-32b-instruct:free";
@@ -39,7 +38,6 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_3_1_24B_INSTRUCT_FREE"] = "mistralai/mistral-small-3.1-24b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_4B_IT_FREE"] = "google/gemma-3-4b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_12B_IT_FREE"] = "google/gemma-3-12b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_REKAAI_REKA_FLASH_3_FREE"] = "rekaai/reka-flash-3:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_3_27B_IT_FREE"] = "google/gemma-3-27b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWQ_32B_FREE"] = "qwen/qwq-32b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_NOUSRESEARCH_DEEPHERMES_3_LLAMA_3_8B_PREVIEW_FREE"] = "nousresearch/deephermes-3-llama-3-8b-preview:free";
@@ -47,7 +45,6 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_COGNITIVECOMPUTATIONS_DOLPHIN3_0_MISTRAL_24B_FREE"] = "cognitivecomputations/dolphin3.0-mistral-24b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_QWEN_QWEN2_5_VL_72B_INSTRUCT_FREE"] = "qwen/qwen2.5-vl-72b-instruct:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE"] = "mistralai/mistral-small-24b-instruct-2501:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE"] = "deepseek/deepseek-r1-distill-qwen-14b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_DISTILL_LLAMA_70B_FREE"] = "deepseek/deepseek-r1-distill-llama-70b:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_DEEPSEEK_DEEPSEEK_R1_FREE"] = "deepseek/deepseek-r1:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE"] = "google/gemini-2.0-flash-exp:free";
@@ -60,4 +57,4 @@ export var E_OPENROUTER_MODEL_FREE;
E_OPENROUTER_MODEL_FREE["MODEL_FREE_GOOGLE_GEMMA_2_9B_IT_FREE"] = "google/gemma-2-9b-it:free";
E_OPENROUTER_MODEL_FREE["MODEL_FREE_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE"] = "mistralai/mistral-7b-instruct:free";
})(E_OPENROUTER_MODEL_FREE || (E_OPENROUTER_MODEL_FREE = {}));
//# sourceMappingURL=data:application/json;base64,… (old and regenerated inline source maps; base64 payloads omitted because the lines are too long)

View File

@@ -1,4 +1,9 @@
export declare enum E_OPENROUTER_MODEL {
MODEL_ALIBABA_TONGYI_DEEPRESEARCH_30B_A3B = "alibaba/tongyi-deepresearch-30b-a3b",
MODEL_QWEN_QWEN3_CODER_FLASH = "qwen/qwen3-coder-flash",
MODEL_QWEN_QWEN3_CODER_PLUS = "qwen/qwen3-coder-plus",
MODEL_ARCEE_AI_AFM_4_5B = "arcee-ai/afm-4.5b",
MODEL_OPENGVLAB_INTERNVL3_78B = "opengvlab/internvl3-78b",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_THINKING = "qwen/qwen3-next-80b-a3b-thinking",
MODEL_QWEN_QWEN3_NEXT_80B_A3B_INSTRUCT = "qwen/qwen3-next-80b-a3b-instruct",
MODEL_MEITUAN_LONGCAT_FLASH_CHAT = "meituan/longcat-flash-chat",
@@ -122,7 +127,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MICROSOFT_MAI_DS_R1_FREE = "microsoft/mai-ds-r1:free",
MODEL_MICROSOFT_MAI_DS_R1 = "microsoft/mai-ds-r1",
MODEL_THUDM_GLM_Z1_32B = "thudm/glm-z1-32b",
MODEL_THUDM_GLM_4_32B = "thudm/glm-4-32b",
MODEL_OPENAI_O4_MINI_HIGH = "openai/o4-mini-high",
MODEL_OPENAI_O3 = "openai/o3",
MODEL_OPENAI_O4_MINI = "openai/o4-mini",
@@ -141,7 +145,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MOONSHOTAI_KIMI_VL_A3B_THINKING = "moonshotai/kimi-vl-a3b-thinking",
MODEL_X_AI_GROK_3_MINI_BETA = "x-ai/grok-3-mini-beta",
MODEL_X_AI_GROK_3_BETA = "x-ai/grok-3-beta",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1_FREE = "nvidia/llama-3.1-nemotron-ultra-253b-v1:free",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_ULTRA_253B_V1 = "nvidia/llama-3.1-nemotron-ultra-253b-v1",
MODEL_META_LLAMA_LLAMA_4_MAVERICK_FREE = "meta-llama/llama-4-maverick:free",
MODEL_META_LLAMA_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick",
@@ -163,7 +166,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_COHERE_COMMAND_A = "cohere/command-a",
MODEL_OPENAI_GPT_4O_MINI_SEARCH_PREVIEW = "openai/gpt-4o-mini-search-preview",
MODEL_OPENAI_GPT_4O_SEARCH_PREVIEW = "openai/gpt-4o-search-preview",
MODEL_REKAAI_REKA_FLASH_3_FREE = "rekaai/reka-flash-3:free",
MODEL_GOOGLE_GEMMA_3_27B_IT_FREE = "google/gemma-3-27b-it:free",
MODEL_GOOGLE_GEMMA_3_27B_IT = "google/gemma-3-27b-it",
MODEL_THEDRUMMER_ANUBIS_PRO_105B_V1 = "thedrummer/anubis-pro-105b-v1",
@@ -202,7 +204,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501_FREE = "mistralai/mistral-small-24b-instruct-2501:free",
MODEL_MISTRALAI_MISTRAL_SMALL_24B_INSTRUCT_2501 = "mistralai/mistral-small-24b-instruct-2501",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_32B = "deepseek/deepseek-r1-distill-qwen-32b",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B_FREE = "deepseek/deepseek-r1-distill-qwen-14b:free",
MODEL_DEEPSEEK_DEEPSEEK_R1_DISTILL_QWEN_14B = "deepseek/deepseek-r1-distill-qwen-14b",
MODEL_PERPLEXITY_SONAR_REASONING = "perplexity/sonar-reasoning",
MODEL_PERPLEXITY_SONAR = "perplexity/sonar",
@@ -218,8 +219,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_DEEPSEEK_DEEPSEEK_CHAT = "deepseek/deepseek-chat",
MODEL_SAO10K_L3_3_EURYALE_70B = "sao10k/l3.3-euryale-70b",
MODEL_OPENAI_O1 = "openai/o1",
MODEL_X_AI_GROK_2_VISION_1212 = "x-ai/grok-2-vision-1212",
MODEL_X_AI_GROK_2_1212 = "x-ai/grok-2-1212",
MODEL_COHERE_COMMAND_R7B_12_2024 = "cohere/command-r7b-12-2024",
MODEL_GOOGLE_GEMINI_2_0_FLASH_EXP_FREE = "google/gemini-2.0-flash-exp:free",
MODEL_META_LLAMA_LLAMA_3_3_70B_INSTRUCT_FREE = "meta-llama/llama-3.3-70b-instruct:free",
@@ -240,19 +239,19 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_ANTHROPIC_CLAUDE_3_5_HAIKU_20241022 = "anthropic/claude-3.5-haiku-20241022",
MODEL_ANTHRACITE_ORG_MAGNUM_V4_72B = "anthracite-org/magnum-v4-72b",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET = "anthropic/claude-3.5-sonnet",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_MISTRALAI_MINISTRAL_8B = "mistralai/ministral-8b",
MODEL_MISTRALAI_MINISTRAL_3B = "mistralai/ministral-3b",
MODEL_QWEN_QWEN_2_5_7B_INSTRUCT = "qwen/qwen-2.5-7b-instruct",
MODEL_NVIDIA_LLAMA_3_1_NEMOTRON_70B_INSTRUCT = "nvidia/llama-3.1-nemotron-70b-instruct",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
MODEL_INFLECTION_INFLECTION_3_PRODUCTIVITY = "inflection/inflection-3-productivity",
MODEL_INFLECTION_INFLECTION_3_PI = "inflection/inflection-3-pi",
MODEL_GOOGLE_GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5-8b",
MODEL_THEDRUMMER_ROCINANTE_12B = "thedrummer/rocinante-12b",
MODEL_ANTHRACITE_ORG_MAGNUM_V2_72B = "anthracite-org/magnum-v2-72b",
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT_FREE = "meta-llama/llama-3.2-3b-instruct:free",
MODEL_META_LLAMA_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3.2-3b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3.2-1b-instruct",
MODEL_META_LLAMA_LLAMA_3_2_90B_VISION_INSTRUCT = "meta-llama/llama-3.2-90b-vision-instruct",
MODEL_META_LLAMA_LLAMA_3_2_11B_VISION_INSTRUCT = "meta-llama/llama-3.2-11b-vision-instruct",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT_FREE = "qwen/qwen-2.5-72b-instruct:free",
MODEL_QWEN_QWEN_2_5_72B_INSTRUCT = "qwen/qwen-2.5-72b-instruct",
@@ -262,8 +261,8 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_MISTRALAI_PIXTRAL_12B = "mistralai/pixtral-12b",
MODEL_COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024",
MODEL_COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_QWEN_QWEN_2_5_VL_7B_INSTRUCT = "qwen/qwen-2.5-vl-7b-instruct",
MODEL_SAO10K_L3_1_EURYALE_70B = "sao10k/l3.1-euryale-70b",
MODEL_MICROSOFT_PHI_3_5_MINI_128K_INSTRUCT = "microsoft/phi-3.5-mini-128k-instruct",
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b",
MODEL_NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b",
@@ -277,45 +276,43 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_META_LLAMA_LLAMA_3_1_70B_INSTRUCT = "meta-llama/llama-3.1-70b-instruct",
MODEL_MISTRALAI_MISTRAL_NEMO_FREE = "mistralai/mistral-nemo:free",
MODEL_MISTRALAI_MISTRAL_NEMO = "mistralai/mistral-nemo",
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
MODEL_OPENAI_GPT_4O_MINI = "openai/gpt-4o-mini",
MODEL_OPENAI_GPT_4O_MINI_2024_07_18 = "openai/gpt-4o-mini-2024-07-18",
MODEL_GOOGLE_GEMMA_2_27B_IT = "google/gemma-2-27b-it",
MODEL_GOOGLE_GEMMA_2_9B_IT_FREE = "google/gemma-2-9b-it:free",
MODEL_GOOGLE_GEMMA_2_9B_IT = "google/gemma-2-9b-it",
MODEL_ANTHROPIC_CLAUDE_3_5_SONNET_20240620 = "anthropic/claude-3.5-sonnet-20240620",
MODEL_SAO10K_L3_EURYALE_70B = "sao10k/l3-euryale-70b",
MODEL_COGNITIVECOMPUTATIONS_DOLPHIN_MIXTRAL_8X22B = "cognitivecomputations/dolphin-mixtral-8x22b",
MODEL_NOUSRESEARCH_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_FREE = "mistralai/mistral-7b-instruct:free",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT = "mistralai/mistral-7b-instruct",
MODEL_MISTRALAI_MISTRAL_7B_INSTRUCT_V0_3 = "mistralai/mistral-7b-instruct-v0.3",
MODEL_MICROSOFT_PHI_3_MINI_128K_INSTRUCT = "microsoft/phi-3-mini-128k-instruct",
MODEL_MICROSOFT_PHI_3_MEDIUM_128K_INSTRUCT = "microsoft/phi-3-medium-128k-instruct",
MODEL_NEVERSLEEP_LLAMA_3_LUMIMAID_70B = "neversleep/llama-3-lumimaid-70b",
MODEL_GOOGLE_GEMINI_FLASH_1_5 = "google/gemini-flash-1.5",
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
MODEL_OPENAI_GPT_4O = "openai/gpt-4o",
MODEL_OPENAI_GPT_4O_EXTENDED = "openai/gpt-4o:extended",
MODEL_META_LLAMA_LLAMA_GUARD_2_8B = "meta-llama/llama-guard-2-8b",
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
MODEL_OPENAI_GPT_4O_2024_05_13 = "openai/gpt-4o-2024-05-13",
MODEL_META_LLAMA_LLAMA_3_8B_INSTRUCT = "meta-llama/llama-3-8b-instruct",
MODEL_META_LLAMA_LLAMA_3_70B_INSTRUCT = "meta-llama/llama-3-70b-instruct",
MODEL_MISTRALAI_MIXTRAL_8X22B_INSTRUCT = "mistralai/mixtral-8x22b-instruct",
MODEL_MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b",
MODEL_GOOGLE_GEMINI_PRO_1_5 = "google/gemini-pro-1.5",
MODEL_OPENAI_GPT_4_TURBO = "openai/gpt-4-turbo",
MODEL_COHERE_COMMAND_R_PLUS = "cohere/command-r-plus",
MODEL_COHERE_COMMAND_R_PLUS_04_2024 = "cohere/command-r-plus-04-2024",
MODEL_SOPHOSYMPATHEIA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b",
MODEL_COHERE_COMMAND = "cohere/command",
MODEL_COHERE_COMMAND_R = "cohere/command-r",
MODEL_ANTHROPIC_CLAUDE_3_HAIKU = "anthropic/claude-3-haiku",
MODEL_ANTHROPIC_CLAUDE_3_OPUS = "anthropic/claude-3-opus",
MODEL_COHERE_COMMAND_R_03_2024 = "cohere/command-r-03-2024",
MODEL_MISTRALAI_MISTRAL_LARGE = "mistralai/mistral-large",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
MODEL_OPENAI_GPT_3_5_TURBO_0613 = "openai/gpt-3.5-turbo-0613",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
MODEL_OPENAI_GPT_4_TURBO_PREVIEW = "openai/gpt-4-turbo-preview",
MODEL_MISTRALAI_MISTRAL_SMALL = "mistralai/mistral-small",
MODEL_MISTRALAI_MISTRAL_TINY = "mistralai/mistral-tiny",
MODEL_MISTRALAI_MIXTRAL_8X7B_INSTRUCT = "mistralai/mixtral-8x7b-instruct",
MODEL_NEVERSLEEP_NOROMAID_20B = "neversleep/noromaid-20b",
MODEL_ALPINDALE_GOLIATH_120B = "alpindale/goliath-120b",
@@ -328,6 +325,6 @@ export declare enum E_OPENROUTER_MODEL {
MODEL_UNDI95_REMM_SLERP_L2_13B = "undi95/remm-slerp-l2-13b",
MODEL_GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b",
MODEL_OPENAI_GPT_3_5_TURBO = "openai/gpt-3.5-turbo",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314",
MODEL_OPENAI_GPT_4 = "openai/gpt-4"
MODEL_OPENAI_GPT_4 = "openai/gpt-4",
MODEL_OPENAI_GPT_4_0314 = "openai/gpt-4-0314"
}

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

After: binary image, 1.6 MiB (width/height not captured in this view)

View File

@@ -2,6 +2,8 @@ import { useState, useEffect } from "react";
import { ImageFile, PromptTemplate } from "./types";
import { useTauriListeners } from "./hooks/useTauriListeners";
import { tauriApi } from "./lib/tauriApi";
import { saveStore } from "./lib/init";
import log from "./lib/log";
import Header from "./components/Header";
import PromptForm from "./components/PromptForm";
import DebugPanel from "./components/DebugPanel";
@@ -25,54 +27,23 @@ function App() {
const [isDarkMode, setIsDarkMode] = useState(false);
const [debugMessages, setDebugMessages] = useState<any[]>([]);
const [showDebugPanel, setShowDebugPanel] = useState(true); // Default open for debugging
// Initialize logging system and connect to UI
useEffect(() => {
log.setMessageCallback((message) => {
setDebugMessages(prev => [...prev.slice(-99), message]); // Keep last 100 messages
});
// Load existing messages
setDebugMessages(log.getLocalMessages());
}, []);
const [ipcInitialized, setIpcInitialized] = useState(false);
const [messageToSend, setMessageToSend] = useState("");
const [generationTimeoutId, setGenerationTimeoutId] = useState<NodeJS.Timeout | null>(null);
const [currentIndex, setCurrentIndex] = useState(0);
const [prompts, setPrompts] = useState<PromptTemplate[]>([]);
const STORE_FILE_NAME = '.kbot-gui.json';
useEffect(() => {
const loadPrompts = async () => {
addDebugMessage('debug', '🔄 Store loading useEffect triggered');
try {
if (tauriApi.isTauri()) {
addDebugMessage('info', '📂 Attempting to load prompts from store...');
const configDir = await tauriApi.path.appDataDir();
addDebugMessage('debug', `📁 Data directory: ${configDir}`);
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
addDebugMessage('debug', `📄 Store path resolved to: ${storePath}`);
const content = await tauriApi.fs.readTextFile(storePath);
addDebugMessage('debug', `📖 File content length: ${content?.length || 0}`);
if (content) {
const data = JSON.parse(content);
addDebugMessage('debug', `📋 Parsed store data:`, data);
if (data.prompts) {
setPrompts(data.prompts);
addDebugMessage('info', `✅ Loaded ${data.prompts.length} prompts from store`);
} else {
addDebugMessage('warn', '⚠️ Store file exists but has no prompts array');
}
} else {
addDebugMessage('info', '📭 Store file is empty');
}
} else {
addDebugMessage('warn', '🌐 Not in Tauri environment, skipping store load');
}
} catch (e) {
const error = e as Error;
addDebugMessage('info', `📂 Prompt store not found or failed to load. A new one will be created on save.`, {
error: error.message,
errorName: error.name,
storePath: STORE_FILE_NAME
});
}
};
loadPrompts();
}, []);
const importPrompts = async () => {
try {
@@ -86,18 +57,18 @@ function App() {
if (newPrompts.prompts && Array.isArray(newPrompts.prompts)) {
setPrompts(newPrompts.prompts);
savePrompts(newPrompts.prompts);
addDebugMessage('info', `✅ Prompts imported successfully from: ${selected}`);
log.info(`✅ Prompts imported successfully from: ${selected}`);
} else {
addDebugMessage('error', 'Invalid prompts file format.');
log.error('Invalid prompts file format.');
}
}
} catch (error) {
addDebugMessage('error', 'Failed to import prompts', { error: (error as Error).message });
log.error('Failed to import prompts', { error: (error as Error).message });
}
};
const exportPrompts = async () => {
addDebugMessage('info', 'Attempting to export prompts...');
log.info('Attempting to export prompts...');
try {
const path = await tauriApi.dialog.save({
defaultPath: 'kbot-prompts.json',
@@ -105,22 +76,22 @@ function App() {
});
if (path) {
addDebugMessage('debug', `📂 Export path selected: ${path}`);
log.debug(`📂 Export path selected: ${path}`);
const dataToWrite = JSON.stringify({ prompts }, null, 2);
addDebugMessage('debug', '📋 Data to be exported:', { promptCount: prompts.length, dataLength: dataToWrite.length });
addDebugMessage('debug', '💾 About to call writeTextFile...');
log.debug('📋 Data to be exported:', { promptCount: prompts.length, dataLength: dataToWrite.length });
log.debug('💾 About to call writeTextFile...');
await tauriApi.fs.writeTextFile(path, dataToWrite);
addDebugMessage('info', `✅ Prompts exported successfully to: ${path}`);
log.info(`✅ Prompts exported successfully to: ${path}`);
} else {
addDebugMessage('info', 'Export dialog was cancelled.');
log.info('Export dialog was cancelled.');
}
} catch (error) {
addDebugMessage('error', 'Failed to export prompts', { error: (error as Error).message });
log.error('Failed to export prompts', { error: (error as Error).message });
}
};
const deleteFilePermanently = async (pathToDelete: string) => {
addDebugMessage('info', `Requesting deletion of file: ${pathToDelete}`);
log.info(`Requesting deletion of file: ${pathToDelete}`);
// This will be the new tauri command
await tauriApi.requestFileDeletion({ path: pathToDelete });
};
@@ -128,7 +99,7 @@ function App() {
const saveImageAs = async (imagePath: string) => {
const imageFile = files.find(f => f.path === imagePath);
if (!imageFile) {
addDebugMessage('error', `Could not find image to save: ${imagePath}`);
log.error(`Could not find image to save: ${imagePath}`);
return;
}
@@ -147,12 +118,12 @@ function App() {
const uint8Array = new Uint8Array(buffer);
await tauriApi.fs.writeFile(newPath, uint8Array);
addDebugMessage('info', `✅ Image saved successfully to: ${newPath}`);
log.info(`✅ Image saved successfully to: ${newPath}`);
} else {
addDebugMessage('info', 'Save dialog was cancelled.');
log.info('Save dialog was cancelled.');
}
} catch (error) {
addDebugMessage('error', `Failed to save image: ${(error as Error).message}`);
log.error(`Failed to save image: ${(error as Error).message}`);
}
};
@@ -169,14 +140,9 @@ function App() {
}
};
const addDebugMessage = async (level: 'info' | 'warn' | 'error' | 'debug', message: string, data?: any) => {
const timestamp = new Date().toLocaleTimeString();
const debugMsg = { level, message, data, timestamp };
// Add to local state
setDebugMessages(prev => [...prev.slice(-99), debugMsg]); // Keep last 100 messages
await tauriApi.addDebugMessage(message, level, data);
// Legacy wrapper kept for compatibility; new code should call log directly.
const addDebugMessage = (level: 'info' | 'warn' | 'error' | 'debug', message: string, data?: any) => {
log[level](message, data);
};
const addImageFromUrl = async (url: string) => {
@@ -206,7 +172,7 @@ function App() {
setDst,
setApiKey,
setIpcInitialized,
addDebugMessage,
setPrompts,
setFiles,
isGenerating,
generationTimeoutId,
@@ -293,18 +259,16 @@ function App() {
return files.filter(file => file.selected);
};
const saveAndClose = async () => {
// Find the last generated image
const generatedFiles = files.filter(file => file.isGenerated);
if (generatedFiles.length === 0) {
addDebugMessage('warn', 'No generated images to save');
log.warn('No generated images to save');
return;
}
const lastGenerated = generatedFiles[generatedFiles.length - 1];
addDebugMessage('info', `💾 Saving and closing with: ${lastGenerated.path}`);
log.info(`💾 Saving and closing with: ${lastGenerated.path}`);
try {
// Send the final result back to images.ts for saving
@@ -320,21 +284,20 @@ function App() {
await tauriApi.submitPrompt(result);
addDebugMessage('info', '✅ Final result sent, closing app');
log.info('✅ Final result sent, closing app');
} catch (error) {
addDebugMessage('error', 'Failed to save and close', { error: (error as Error).message });
log.error('Failed to save and close', { error: (error as Error).message });
}
};
const generateImage = async (promptText: string, includeImages: ImageFile[] = []) => {
if (!apiKey) {
addDebugMessage('error', 'No API key available for image generation');
log.error('No API key available for image generation');
return;
}
setIsGenerating(true);
addDebugMessage('info', `🎨 Starting image generation via backend: "${promptText}"`);
log.info(`🎨 Starting image generation via backend: "${promptText}"`);
// Add placeholder image with spinner to the files grid
const placeholderFile: ImageFile = {
@@ -358,7 +321,7 @@ function App() {
const filePaths = includeImages.map(img => img.path);
const genDst = dst || `generated_${Date.now()}.png`;
addDebugMessage('info', 'Sending generation request to images.ts backend', {
log.info('Sending generation request to images.ts backend', {
prompt: promptText,
files: filePaths,
dst: genDst
@@ -371,7 +334,7 @@ function App() {
dst: genDst
});
addDebugMessage('info', '📤 Generation request sent to backend');
log.info('📤 Generation request sent to backend');
// Clear any existing timeout
if (generationTimeoutId) {
@@ -379,7 +342,7 @@ function App() {
}
const timeoutId = setTimeout(() => {
addDebugMessage('warn', '⏰ Generation timeout - resetting state');
log.warn('⏰ Generation timeout - resetting state');
setIsGenerating(false);
setFiles(prev => prev.filter(file => !file.path.startsWith('generating_')));
setGenerationTimeoutId(null);
@@ -388,7 +351,7 @@ function App() {
setGenerationTimeoutId(timeoutId);
} catch (error) {
addDebugMessage('error', 'Failed to send generation request', {
log.error('Failed to send generation request', {
error: error instanceof Error ? error.message : JSON.stringify(error)
});
setIsGenerating(false);
@@ -459,13 +422,13 @@ function App() {
const clearDebugMessages = async () => {
setDebugMessages([]);
log.clearLocalMessages();
await tauriApi.clearDebugMessages();
addDebugMessage('info', 'Debug messages cleared');
};
const sendIPCMessage = async (messageType: string, data: any) => {
await tauriApi.sendIPCMessage(messageType, data);
addDebugMessage('info', `IPC message sent: ${messageType}`, data);
log.info(`IPC message sent: ${messageType}`, data);
};
const sendMessageToImages = async () => {
@@ -479,10 +442,10 @@ function App() {
try {
await tauriApi.sendMessageToStdout(JSON.stringify(message));
addDebugMessage('info', `📤 Sent to images.ts: ${messageToSend}`, message);
log.info(`📤 Sent to images.ts: ${messageToSend}`, message);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
addDebugMessage('error', `Failed to send message: ${errorMessage}`);
log.error(`Failed to send message: ${errorMessage}`);
}
// Clear the input
@@ -490,32 +453,18 @@ function App() {
};
const savePrompts = async (promptsToSave: PromptTemplate[]) => {
if (tauriApi.isTauri()) {
try {
addDebugMessage('debug', '💾 Starting save prompts process...');
const dataDir = await tauriApi.path.appDataDir();
addDebugMessage('debug', `📁 Got data dir: ${dataDir}`);
const storePath = await tauriApi.path.join(dataDir, STORE_FILE_NAME);
addDebugMessage('debug', `📄 Store path: ${storePath}`);
const dataToSave = JSON.stringify({ prompts: promptsToSave }, null, 2);
addDebugMessage('debug', `💾 Data to save:`, { promptCount: promptsToSave.length, dataLength: dataToSave.length });
await tauriApi.fs.writeTextFile(storePath, dataToSave);
addDebugMessage('info', `✅ Prompts saved to ${storePath}`);
} catch (error) {
addDebugMessage('error', 'Failed to save prompts', {
error: (error as Error).message,
errorName: (error as Error).name,
errorStack: (error as Error).stack
});
}
} else {
addDebugMessage('warn', '🌐 Not in Tauri, cannot save prompts');
try {
await saveStore(promptsToSave);
} catch (error) {
log.error('Failed to save prompts', {
error: (error as Error).message
});
}
};
async function openFilePicker() {
if (!tauriApi.isTauri()) {
const { isTauri: isTauriEnv } = await tauriApi.ensureTauriApi();
if (!isTauriEnv) {
// Browser fallback: create file input
const input = document.createElement('input');
input.type = 'file';
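
The App.tsx changes above route all debug output through the shared log module and delegate prompt persistence to saveStore from ./lib/init. For reference, a minimal sketch of the store file that saveStore writes, assuming JSON.stringify({ prompts }, null, 2) as in the diff; the fields inside each PromptTemplate are hypothetical, since the type definition is not part of this commit.

// Hypothetical shape of .kbot-gui.json: only the top-level { prompts } key
// is guaranteed by saveStore; the inner fields are placeholders.
const exampleStore = {
  prompts: [
    { name: "upscale", text: "enhance the selected image" }, // hypothetical fields
  ],
};
const serialized = JSON.stringify(exampleStore, null, 2); // payload passed to writeTextFile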

View File

@@ -1,14 +1,11 @@
import { useEffect } from 'react';
import { tauriApi } from '../lib/tauriApi';
import { initializeApp, completeInitialization, InitCallbacks } from '../lib/init';
import log from '../lib/log';
import { TauriEvent } from '../constants';
import { ImageFile } from '../types';
interface TauriListenersProps {
setPrompt: (prompt: string) => void;
setDst: (dst: string) => void;
setApiKey: (key: string) => void;
setIpcInitialized: (initialized: boolean) => void;
addDebugMessage: (level: 'info' | 'warn' | 'error' | 'debug', message: string, data?: any) => void;
interface TauriListenersProps extends InitCallbacks {
setFiles: React.Dispatch<React.SetStateAction<ImageFile[]>>;
isGenerating: boolean;
generationTimeoutId: NodeJS.Timeout | null;
@@ -22,7 +19,7 @@ export function useTauriListeners({
setDst,
setApiKey,
setIpcInitialized,
addDebugMessage,
setPrompts,
setFiles,
isGenerating,
generationTimeoutId,
@@ -39,30 +36,38 @@ export function useTauriListeners({
let unlistenDeleteError: (() => void) | undefined;
const setupListeners = async () => {
await tauriApi.ensureTauriApi();
// Initialize the app using the centralized init system
const initCallbacks: InitCallbacks = {
setPrompt,
setDst,
setApiKey,
setPrompts,
setIpcInitialized
};
if (!tauriApi.isTauri()) {
addDebugMessage('warn', 'Tauri APIs not available, running in browser mode.');
const initState = await initializeApp(initCallbacks);
if (!initState.isTauriEnv) {
log.warn('Tauri APIs not available, running in browser mode.');
return;
}
const listeners = await Promise.all([
tauriApi.listen(TauriEvent.CONFIG_RECEIVED, (event: any) => {
tauriApi.listen(TauriEvent.CONFIG_RECEIVED, async (event: any) => {
const data = event.payload;
if (data.prompt) setPrompt(data.prompt);
if (data.dst) setDst(data.dst);
if (data.apiKey) setApiKey(data.apiKey);
setIpcInitialized(true);
addDebugMessage('info', '📨 Config received from images.ts', {
hasPrompt: !!data.prompt,
hasDst: !!data.dst,
hasApiKey: !!data.apiKey,
fileCount: data.files?.length || 0,
});
// Complete initialization using the centralized system
try {
await completeInitialization(data, initCallbacks);
} catch (error) {
log.error('Failed to complete initialization after config received', {
error: (error as Error).message
});
}
}),
tauriApi.listen(TauriEvent.IMAGE_RECEIVED, (event: any) => {
const imageData = event.payload;
addDebugMessage('debug', '🖼️ Processing image data', {
log.debug('🖼️ Processing image data', {
filename: imageData.filename,
mimeType: imageData.mimeType,
base64Length: imageData.base64?.length,
@@ -85,21 +90,21 @@ export function useTauriListeners({
setGenerationTimeoutId(null);
}
setIsGenerating(false);
addDebugMessage('info', '✅ Generated image added to files', { filename: imageData.filename, prompt });
log.info('✅ Generated image added to files', { filename: imageData.filename, prompt });
} else {
const newImageFile: ImageFile = { path: imageData.filename, src, isGenerated: false };
setFiles(prevFiles => {
const exists = prevFiles.some(f => f.path === imageData.filename);
if (!exists) {
addDebugMessage('info', `📁 Adding input image: ${imageData.filename}`);
log.info(`📁 Adding input image: ${imageData.filename}`);
return [...prevFiles, newImageFile];
}
addDebugMessage('warn', `🔄 Image already exists: ${imageData.filename}`);
log.warn(`🔄 Image already exists: ${imageData.filename}`);
return prevFiles;
});
}
} else {
addDebugMessage('error', '❌ Invalid image data received', {
log.error('❌ Invalid image data received', {
hasBase64: !!imageData.base64,
hasMimeType: !!imageData.mimeType,
hasFilename: !!imageData.filename,
@@ -108,13 +113,13 @@ export function useTauriListeners({
}),
tauriApi.listen(TauriEvent.GENERATION_ERROR, (event: any) => {
const errorData = event.payload;
addDebugMessage('error', '❌ Generation failed', errorData);
log.error('❌ Generation failed', errorData);
setIsGenerating(false);
setFiles(prev => prev.filter(file => !file.path.startsWith('generating_')));
}),
tauriApi.listen(TauriEvent.GENERATION_COMPLETE, (event: any) => {
const completionData = event.payload;
addDebugMessage('info', '✅ Simple mode: Image generation completed', {
log.info('✅ Simple mode: Image generation completed', {
dst: completionData.dst,
prompt: completionData.prompt
});
@@ -123,23 +128,16 @@ export function useTauriListeners({
}),
tauriApi.listen(TauriEvent.FILE_DELETED_SUCCESSFULLY, (event: any) => {
const deletedPath = event.payload.path;
addDebugMessage('info', `✅ File deleted successfully: ${deletedPath}`);
log.info(`✅ File deleted successfully: ${deletedPath}`);
setFiles(prevFiles => prevFiles.filter(file => file.path !== deletedPath));
}),
tauriApi.listen(TauriEvent.FILE_DELETION_ERROR, (event: any) => {
const { path, error } = event.payload;
addDebugMessage('error', `Failed to delete file: ${path}`, { error });
log.error(`Failed to delete file: ${path}`, { error });
})
]);
[unlistenConfig, unlistenImage, unlistenError, unlistenComplete, unlistenDeleted, unlistenDeleteError] = listeners;
try {
await tauriApi.requestConfigFromImages();
addDebugMessage('info', 'Config request sent to images.ts');
} catch (e) {
addDebugMessage('error', `Failed to request config: ${e}`);
}
};
setupListeners();
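
Taken together, the hook now drives a two-phase handshake: initializeApp loads the Tauri APIs and sends the config request, and the CONFIG_RECEIVED event completes initialization. A condensed sketch of the control flow, using only names from this diff; note that initializeApp fires requestConfigFromImages before the listeners are registered, so if the backend could ever answer immediately, registering the CONFIG_RECEIVED listener first would be the safer ordering.

// Condensed control flow of setupListeners(), names taken from this diff.
const initState = await initializeApp(initCallbacks); // loads APIs, sends config request
if (!initState.isTauriEnv) return;                    // browser mode: no listeners needed

const unlistenConfig = await tauriApi.listen(TauriEvent.CONFIG_RECEIVED, async (event: any) => {
  // runs processConfig() followed by loadStore(), with error logging
  await completeInitialization(event.payload, initCallbacks);
});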

View File

@@ -0,0 +1,276 @@
import { tauriApi } from './tauriApi';
import { PromptTemplate } from '../types';
import log from './log';
export interface InitConfig {
prompt?: string;
dst?: string;
apiKey?: string;
files?: string[];
}
export interface InitState {
isInitialized: boolean;
isTauriEnv: boolean;
apiInitialized: boolean;
config: InitConfig | null;
prompts: PromptTemplate[];
error: string | null;
}
export interface InitCallbacks {
setPrompt: (prompt: string) => void;
setDst: (dst: string) => void;
setApiKey: (key: string) => void;
setPrompts: (prompts: PromptTemplate[]) => void;
setIpcInitialized: (initialized: boolean) => void;
}
const STORE_FILE_NAME = '.kbot-gui.json';
/**
* Step 1: Initialize Tauri APIs
*/
export async function initAPI(): Promise<{ isTauri: boolean; apiInitialized: boolean }> {
log.info('🚀 Starting API initialization...');
try {
const result = await tauriApi.ensureTauriApi();
log.info('✅ API initialization complete', {
isTauri: result.isTauri,
apiInitialized: result.apiInitialized,
windowTauri: !!(window as any).__TAURI__
});
return result;
} catch (error) {
log.error('❌ API initialization failed', {
error: (error as Error).message
});
throw error;
}
}
/**
* Step 2: Get configuration from images.ts backend
*/
export async function getConfig(): Promise<void> {
log.info('📡 Requesting config from images.ts...');
try {
await tauriApi.requestConfigFromImages();
log.info('📤 Config request sent to images.ts');
} catch (error) {
log.error('❌ Failed to request config', {
error: (error as Error).message
});
throw error;
}
}
/**
* Step 3: Load prompts store after config is received
*/
export async function loadStore(callbacks: Pick<InitCallbacks, 'setPrompts'>): Promise<PromptTemplate[]> {
const { setPrompts } = callbacks;
log.debug('🔄 Loading prompts from store...');
try {
const { isTauri: isTauriEnv } = await tauriApi.ensureTauriApi();
if (!isTauriEnv) {
log.warn('🌐 Not in Tauri environment, skipping store load');
return [];
}
log.info('📂 Attempting to load prompts from store...');
const configDir = await tauriApi.path.appDataDir();
log.debug(`📁 Data directory: ${configDir}`);
const storePath = await tauriApi.path.join(configDir, STORE_FILE_NAME);
log.debug(`📄 Store path resolved to: ${storePath}`);
const content = await tauriApi.fs.readTextFile(storePath);
log.debug(`📖 File content length: ${content?.length || 0}`);
if (content) {
const data = JSON.parse(content);
log.debug(`📋 Parsed store data:`, data);
if (data.prompts && Array.isArray(data.prompts)) {
setPrompts(data.prompts);
log.info(`✅ Loaded ${data.prompts.length} prompts from store`);
return data.prompts;
} else {
log.warn('⚠️ Store file exists but has no valid prompts array');
return [];
}
} else {
log.info('📭 Store file is empty');
return [];
}
} catch (error) {
const err = error as Error;
log.info(`📂 Prompt store not found or failed to load. A new one will be created on save.`, {
error: err.message,
errorName: err.name,
storePath: STORE_FILE_NAME
});
return [];
}
}
/**
* Process received config data and update state
*/
export function processConfig(
configData: any,
callbacks: Pick<InitCallbacks, 'setPrompt' | 'setDst' | 'setApiKey' | 'setIpcInitialized'>
): InitConfig {
const { setPrompt, setDst, setApiKey, setIpcInitialized } = callbacks;
const config: InitConfig = {};
if (configData.prompt) {
config.prompt = configData.prompt;
setPrompt(configData.prompt);
}
if (configData.dst) {
config.dst = configData.dst;
setDst(configData.dst);
}
if (configData.apiKey) {
config.apiKey = configData.apiKey;
setApiKey(configData.apiKey);
}
if (configData.files) {
config.files = configData.files;
}
setIpcInitialized(true);
// Mark IPC as ready for logging system
log.setIpcReady(true);
log.info('📨 Config processed successfully', {
hasPrompt: !!config.prompt,
hasDst: !!config.dst,
hasApiKey: !!config.apiKey,
fileCount: config.files?.length || 0,
});
return config;
}
/**
* Complete initialization flow
*/
export async function initializeApp(callbacks: InitCallbacks): Promise<InitState> {
// Initialize logging system first
log.initLogging();
const state: InitState = {
isInitialized: false,
isTauriEnv: false,
apiInitialized: false,
config: null,
prompts: [],
error: null
};
try {
log.info('🎯 Starting complete app initialization...');
// Step 1: Initialize APIs
const apiResult = await initAPI();
state.isTauriEnv = apiResult.isTauri;
state.apiInitialized = apiResult.apiInitialized;
if (state.isTauriEnv) {
// Step 2: Request config (this will trigger CONFIG_RECEIVED event)
await getConfig();
log.info('⏳ Waiting for config to be received via event...');
// Note: The actual config processing and store loading will happen
// in the CONFIG_RECEIVED event handler
} else {
log.warn('🌐 Running in browser mode, skipping config request');
state.isInitialized = true;
}
} catch (error) {
const err = error as Error;
state.error = err.message;
log.error('❌ App initialization failed', {
error: err.message,
step: 'initialization'
});
}
return state;
}
/**
* Complete the initialization after config is received
*/
export async function completeInitialization(
configData: any,
callbacks: InitCallbacks
): Promise<{ config: InitConfig; prompts: PromptTemplate[] }> {
try {
log.info('🎯 Completing initialization after config received...');
// Process the received config
const config = processConfig(configData, callbacks);
// Load the prompts store
const prompts = await loadStore(callbacks);
log.info('✅ App initialization completed successfully', {
configKeys: Object.keys(config),
promptCount: prompts.length
});
return { config, prompts };
} catch (error) {
const err = error as Error;
log.error('❌ Failed to complete initialization', {
error: err.message
});
throw error;
}
}
/**
* Save prompts to store
*/
export async function saveStore(prompts: PromptTemplate[]): Promise<void> {
const { isTauri: isTauriEnv } = await tauriApi.ensureTauriApi();
if (!isTauriEnv) {
log.warn('🌐 Not in Tauri, cannot save prompts');
return;
}
try {
log.debug('💾 Starting save prompts process...');
const dataDir = await tauriApi.path.appDataDir();
log.debug(`📁 Got data dir: ${dataDir}`);
const storePath = await tauriApi.path.join(dataDir, STORE_FILE_NAME);
log.debug(`📄 Store path: ${storePath}`);
const dataToSave = JSON.stringify({ prompts }, null, 2);
log.debug(`💾 Data to save:`, { promptCount: prompts.length, dataLength: dataToSave.length });
await tauriApi.fs.writeTextFile(storePath, dataToSave);
log.info(`✅ Prompts saved to ${storePath}`);
} catch (error) {
log.error('Failed to save prompts', {
error: (error as Error).message,
errorName: (error as Error).name,
errorStack: (error as Error).stack
});
throw error;
}
}
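
A minimal usage sketch of the new init module from a caller's point of view, assuming state setters that match the InitCallbacks signatures (the setters themselves are not defined in this file).

// Sketch: wiring the init module from a component; the setters are assumed to exist.
const callbacks: InitCallbacks = {
  setPrompt,
  setDst,
  setApiKey,
  setPrompts,
  setIpcInitialized,
};

const state = await initializeApp(callbacks); // steps 1 and 2: load APIs, request config
// Later, inside the CONFIG_RECEIVED handler:
const { config, prompts } = await completeInitialization(eventPayload, callbacks);
// saveStore() is the write-side counterpart of loadStore():
await saveStore(prompts);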

View File

@@ -0,0 +1,180 @@
import { tauriApi } from './tauriApi';
export type LogLevel = 'info' | 'warn' | 'error' | 'debug';
interface LogMessage {
level: LogLevel;
message: string;
data?: any;
timestamp: string;
}
interface LogState {
isIpcReady: boolean;
pendingMessages: LogMessage[];
localMessages: LogMessage[];
onMessage?: (message: LogMessage) => void;
}
const logState: LogState = {
isIpcReady: false,
pendingMessages: [],
localMessages: [],
};
/**
* Set the IPC ready state and flush pending messages
*/
export function setIpcReady(ready: boolean) {
logState.isIpcReady = ready;
if (ready && logState.pendingMessages.length > 0) {
info(`🔄 IPC ready, flushing ${logState.pendingMessages.length} pending messages`);
// Flush all pending messages to backend
logState.pendingMessages.forEach(msg => {
sendToBackend(msg);
});
logState.pendingMessages = [];
}
}
/**
* Set callback for local message handling (e.g., UI updates)
*/
export function setMessageCallback(callback: (message: LogMessage) => void) {
logState.onMessage = callback;
}
/**
* Get current log state for debugging
*/
export function getLogState() {
return {
...logState,
pendingCount: logState.pendingMessages.length,
localCount: logState.localMessages.length,
};
}
/**
* Send message to backend via IPC
*/
async function sendToBackend(message: LogMessage) {
try {
await tauriApi.addDebugMessage(message.message, message.level, message.data);
} catch (error) {
console.warn('Failed to send log message to backend:', error);
// Don't create infinite loop by logging this error
}
}
/**
* Core logging function
*/
function log(level: LogLevel, message: string, data?: any) {
const timestamp = new Date().toLocaleTimeString();
const logMessage: LogMessage = { level, message, data, timestamp };
// Always log to console for immediate visibility
const consoleMethod = level === 'error' ? 'error' :
level === 'warn' ? 'warn' :
level === 'debug' ? 'debug' : 'log';
const prefix = {
info: '✅',
warn: '⚠️',
error: '❌',
debug: '🔍'
}[level];
console[consoleMethod](`${prefix} [${timestamp}] ${message}`, data || '');
// Store locally for UI
logState.localMessages.push(logMessage);
// Keep only last 100 messages
if (logState.localMessages.length > 100) {
logState.localMessages = logState.localMessages.slice(-100);
}
// Notify UI callback if set
if (logState.onMessage) {
logState.onMessage(logMessage);
}
// Send to backend if IPC is ready
if (logState.isIpcReady) {
sendToBackend(logMessage);
} else {
// Queue for later if IPC not ready
logState.pendingMessages.push(logMessage);
// Prevent memory issues - keep only last 50 pending messages
if (logState.pendingMessages.length > 50) {
logState.pendingMessages = logState.pendingMessages.slice(-50);
}
}
}
/**
* Logging functions with different levels
*/
export function info(message: string, data?: any) {
log('info', message, data);
}
export function warn(message: string, data?: any) {
log('warn', message, data);
}
export function error(message: string, data?: any) {
log('error', message, data);
}
export function debug(message: string, data?: any) {
log('debug', message, data);
}
/**
* Get all local messages for UI display
*/
export function getLocalMessages(): LogMessage[] {
return [...logState.localMessages];
}
/**
* Clear all local messages
*/
export function clearLocalMessages() {
logState.localMessages = [];
if (logState.onMessage) {
// Notify UI that messages were cleared
info('Debug messages cleared');
}
}
/**
* Initialize logging system
*/
export function initLogging() {
info('🚀 Logging system initialized');
debug('Log state:', getLogState());
}
/**
* Default export with all logging functions
*/
const logger = {
info,
warn,
error,
debug,
setIpcReady,
setMessageCallback,
getLocalMessages,
clearLocalMessages,
getLogState,
initLogging
};
export default logger;
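
The point of this module is the buffering around IPC readiness: every message reaches the console and the local ring buffer immediately, but only reaches the backend once setIpcReady(true) is called. A small usage sketch built only from the exported functions above:

import log from './lib/log';

log.initLogging();          // emits '🚀 Logging system initialized'
log.debug('early message'); // console + localMessages, queued in pendingMessages
log.setMessageCallback(msg => {
  // e.g. push into React state for the DebugPanel
  console.log('UI received', msg.level, msg.message);
});
log.setIpcReady(true);      // flushes the queue via tauriApi.addDebugMessage(...)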

View File

@@ -22,55 +22,82 @@ let appDataDir: any;
let join: any;
let isTauri = false;
let apiInitialized = false;
const isBrowser = typeof window !== 'undefined';
const apiInitializationPromise = (async () => {
if (!isBrowser) return;
if (!isBrowser) {
console.log('Not in browser environment');
return;
}
try {
const windowApi = await import('@tauri-apps/api/window');
getCurrentWindow = windowApi.getCurrentWindow;
const webviewApi = await import('@tauri-apps/api/webview');
getCurrentWebview = webviewApi.getCurrentWebview;
isTauri = true;
console.log('✅ Tauri window API loaded');
// Check if we're in Tauri environment first
if (typeof window !== 'undefined' && (window as any).__TAURI__) {
console.log('🔍 Tauri environment detected, loading APIs...');
const coreApi = await import('@tauri-apps/api/core');
invoke = coreApi.invoke;
// Load all Tauri APIs in parallel for better performance
const [
windowApi,
webviewApi,
coreApi,
eventApi,
dialogApi,
fsApi,
httpApi,
pathApi
] = await Promise.all([
import('@tauri-apps/api/window'),
import('@tauri-apps/api/webview'),
import('@tauri-apps/api/core'),
import('@tauri-apps/api/event'),
import('@tauri-apps/plugin-dialog'),
import('@tauri-apps/plugin-fs'),
import('@tauri-apps/plugin-http'),
import('@tauri-apps/api/path')
]);
const eventApi = await import('@tauri-apps/api/event');
listen = eventApi.listen;
// Assign all APIs
getCurrentWindow = windowApi.getCurrentWindow;
getCurrentWebview = webviewApi.getCurrentWebview;
invoke = coreApi.invoke;
listen = eventApi.listen;
open = dialogApi.open;
save = dialogApi.save;
readFile = fsApi.readFile;
writeFile = fsApi.writeFile;
readTextFile = fsApi.readTextFile;
writeTextFile = fsApi.writeTextFile;
BaseDirectory = fsApi.BaseDirectory;
fetch = httpApi.fetch;
appConfigDir = pathApi.appConfigDir;
appDataDir = pathApi.appDataDir;
join = pathApi.join;
const dialogApi = await import('@tauri-apps/plugin-dialog');
open = dialogApi.open;
save = dialogApi.save;
const fsApi = await import('@tauri-apps/plugin-fs');
readFile = fsApi.readFile;
writeFile = fsApi.writeFile;
readTextFile = fsApi.readTextFile;
writeTextFile = fsApi.writeTextFile;
BaseDirectory = fsApi.BaseDirectory;
const httpApi = await import('@tauri-apps/plugin-http');
fetch = httpApi.fetch;
const pathApi = await import('@tauri-apps/api/path');
appConfigDir = pathApi.appConfigDir;
appDataDir = pathApi.appDataDir;
join = pathApi.join;
isTauri = true;
apiInitialized = true;
console.log('✅ All Tauri APIs loaded successfully');
} else {
console.log('🌐 No Tauri environment detected, running in browser mode');
isTauri = false;
}
} catch (e) {
console.warn('Tauri APIs not available, running in browser mode.');
console.warn('❌ Failed to load Tauri APIs, falling back to browser mode:', e);
isTauri = false;
}
// Fallback to browser fetch if no Tauri fetch available
if (isBrowser && !fetch) {
fetch = window.fetch;
console.log('🔄 Using browser fetch as fallback');
}
apiInitialized = true;
})();
export const ensureTauriApi = async () => {
await apiInitializationPromise;
return { isTauri, apiInitialized };
};
// Safe invoke function
@@ -94,7 +121,12 @@ export { invoke, isTauri };
// Typed API wrappers
export const tauriApi = {
ensureTauriApi,
isTauri: () => isTauri,
isTauri: () => {
if (!apiInitialized) {
console.warn('⚠️ Tauri API not yet initialized, returning current state');
}
return isTauri;
},
fetch: async (...args: Parameters<typeof window.fetch>): Promise<Response> => {
await ensureTauriApi();
const fetchFn = fetch || window.fetch;
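
With this refactor, the plugin modules load in a single Promise.all and apiInitialized only settles once the environment check has run, so callers should await ensureTauriApi() rather than reading isTauri synchronously. A minimal guard sketch:

// Sketch: preferred way to gate on the Tauri environment after this change.
const { isTauri, apiInitialized } = await tauriApi.ensureTauriApi();
if (!isTauri) {
  // Browser mode: only the window.fetch fallback is wired up.
  console.log('running in browser mode', { apiInitialized });
}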

View File

@@ -125,7 +125,6 @@ async function launchGuiAndGetPrompt(argv: any): Promise<string | null> {
});
return new Promise((_resolve, reject) => {
logger.info('🚀 Starting GUI application with improved logging');
const guiAppPath = getGuiAppPath();
logger.info('📁 GUI app path:', guiAppPath);
if (!exists(guiAppPath)) {