kbot cpp structured & output

This commit is contained in:
lovebird 2026-03-30 17:28:46 +02:00
parent c90063d8a1
commit d5382c5af4
3 changed files with 112 additions and 6 deletions

View File

@ -11,6 +11,7 @@
* npm run test:ipc:classifier -- -r openrouter -m openai/gpt-4o-mini --backend remote -n 3 -F structured
* npm run test:ipc:classifier -- -r openrouter -m x -F stress,no-heartbeat
* npm run test:ipc:classifier -- -r openrouter -m x --backend remote -n 3 -F stress,structured
* npm run test:ipc:classifier -- -r openrouter -m x --backend remote -F structured --dst ./out.json
*
* Env:
* KBOT_IPC_CLASSIFIER_LLAMA set 0 to use OpenRouter (KBOT_ROUTER, KBOT_IPC_MODEL) instead of local llama :8888
@ -30,7 +31,7 @@
import { spawn } from 'node:child_process';
import { mkdir, writeFile } from 'node:fs/promises';
import { dirname } from 'node:path';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import net from 'node:net';
import { existsSync, unlinkSync } from 'node:fs';
@ -64,7 +65,7 @@ import {
writeTestReports,
} from './reports.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
const __dirname = path.dirname(fileURLToPath(import.meta.url));
/** Set at run start; used by catch for error reports */
let classifierMetricsCollector = null;
let classifierRunStartedAt = null;
@ -136,6 +137,16 @@ export function parseClassifierArgv() {
describe:
'Feature flags (repeat or comma-separated): stress, structured, no-heartbeat, no-report, quiet',
})
.option('dst', {
type: 'string',
describe:
'Forwarded to kbot-ai IPC `dst` (worker writes completion text here; path resolved from cwd). Same as C++ --dst.',
})
.option('output', {
type: 'string',
describe:
'Forwarded to IPC if --dst omitted (C++ `output` field). Prefer --dst when both are set.',
})
.strict()
.help()
.alias('h', 'help');
@ -417,6 +428,12 @@ function buildKbotAiPayload(labels, tmo) {
if (structured) {
payload.response_format = { type: 'json_object' };
}
const rawDst = classifierArgv?.dst || classifierArgv?.output;
if (rawDst != null && String(rawDst).trim() !== '') {
payload.dst = path.resolve(process.cwd(), String(rawDst).trim());
}
return payload;
}
@ -663,6 +680,13 @@ async function run() {
backend: useLlama ? 'local' : 'remote',
stressRuns: nRuns,
structuredOutput: !useLlama && classifierFeatures.has('structured'),
dst:
classifierArgv?.dst || classifierArgv?.output
? path.resolve(
process.cwd(),
String(classifierArgv.dst || classifierArgv.output).trim()
)
: null,
},
env: {
KBOT_IPC_CLASSIFIER_LLAMA: process.env.KBOT_IPC_CLASSIFIER_LLAMA ?? null,
@ -725,7 +749,7 @@ async function run() {
/** Array-only artifact (same timestamp as main report). */
arrayPath = reportFilePathWithExt('test-ipc-classifier-distances', '.json', { cwd, now: reportNow });
await mkdir(dirname(arrayPath), { recursive: true });
await mkdir(path.dirname(arrayPath), { recursive: true });
await writeFile(arrayPath, `${JSON.stringify(distances, null, 2)}\n`, 'utf8');
}

View File

@ -1,5 +1,7 @@
#include "kbot.h"
#include "source_files.h"
#include <fstream>
#include <filesystem>
#include <iostream>
#include "logger/logger.h"
#include "llm_client.h"
@ -12,6 +14,76 @@ namespace kbot {
namespace {
namespace fs = std::filesystem;
/**
 * @brief Replace every occurrence of `from` in `s` with `to`, in place.
 *
 * Scans left-to-right and advances past each replacement, so a `to` that
 * itself contains `from` cannot be re-matched (no runaway growth).
 * An empty `from` is a no-op: std::string::find("") matches at every
 * position, which would loop forever when `to` is also empty.
 */
static void replace_all(std::string &s, const std::string &from, const std::string &to) {
  if (from.empty())
    return; // guard: empty pattern matches everywhere -> infinite loop risk
  std::size_t pos = 0;
  while ((pos = s.find(from, pos)) != std::string::npos) {
    s.replace(pos, from.length(), to);
    pos += to.length(); // skip over the inserted text
  }
}
/**
 * @brief Last path-style component of a model id
 *        (e.g. "openai/gpt-4o-mini" -> "gpt-4o-mini").
 *
 * Used for ${MODEL} expansion in --dst templates, so the result must never
 * be empty: empty input, and input consisting only of separators, yield
 * "unknown_model". Trailing '/' or '\\' are trimmed first so "vendor/name/"
 * still resolves to "name" instead of an empty string.
 */
static std::string model_basename(const std::string &model) {
  // Position of the last character that is not a path separator.
  const auto end = model.find_last_not_of("/\\");
  if (end == std::string::npos)
    return "unknown_model"; // empty or all-separator input
  // Last separator at or before `end`; basename starts right after it.
  const auto slash = model.find_last_of("/\\", end);
  const auto start = (slash == std::string::npos) ? 0 : slash + 1;
  return model.substr(start, end - start + 1);
}
/**
 * @brief Expand ${MODEL}, ${MODEL_NAME} and ${ROUTER} placeholders in a raw
 *        --dst path template, using the model basename and router from opts.
 */
static std::string expand_dst_path(const KBotOptions &opts, std::string raw) {
  const auto substitute = [&raw](const char *var, const std::string &value) {
    replace_all(raw, var, value);
  };
  const std::string model = model_basename(opts.model);
  substitute("${MODEL}", model);
  substitute("${MODEL_NAME}", model); // both spellings map to the same value
  substitute("${ROUTER}", opts.router.empty() ? std::string("unknown_router") : opts.router);
  return raw;
}
/**
 * Same idea as TS `onCompletion`: pick the completion destination path.
 * `dst` takes precedence; the legacy `output` field is the fallback, so the
 * result is empty only when neither was supplied.
 */
static std::string effective_completion_dst(const KBotOptions &opts) {
  return opts.dst.empty() ? opts.output : opts.dst;
}
/** @returns true if wrote to file (caller should skip printing body to stdout). */
static bool try_write_completion_to_dst(const KBotOptions &opts, const std::string &text) {
const std::string raw = effective_completion_dst(opts);
if (raw.empty())
return false;
std::string expanded = expand_dst_path(opts, raw);
fs::path p;
try {
p = fs::absolute(expanded);
} catch (const std::exception &e) {
logger::error(std::string("Invalid output path: ") + e.what());
return false;
}
std::error_code ec;
fs::create_directories(p.parent_path(), ec);
if (ec) {
logger::error("Failed to create output directories: " + ec.message());
return false;
}
const bool append_existing = (opts.append != "replace") && fs::exists(p);
std::ofstream out(p, std::ios::binary | (append_existing ? std::ios::app : std::ios::trunc));
if (!out) {
logger::error("Failed to open output file: " + p.string());
return false;
}
out << text;
if (!text.empty() && text.back() != '\n')
out.put('\n');
logger::info(std::string(append_existing ? "Appended completion to " : "Wrote completion to ") + p.string());
return true;
}
std::string json_job_result_ai(bool success, const std::string &text_or_error, bool is_text,
const std::string &provider_meta_json = {}) {
nlohmann::json o;
@ -63,7 +135,8 @@ int run_kbot_ai_pipeline(const KBotOptions &opts, const KBotCallbacks &cb) {
LLMResponse res = client.execute_chat(target_prompt);
if (res.success) {
std::cout << res.text << "\n";
if (!try_write_completion_to_dst(opts, res.text))
std::cout << res.text << "\n";
if (cb.onEvent) {
cb.onEvent("ai_progress",
"{\"message\":\"Task completion received\",\"has_text\":true}");

View File

@ -42,8 +42,11 @@ CLI::App* setup_cmd_kbot(CLI::App& app) {
ai_cmd->add_option("-p,--path", g_kbot_opts.path, "Target directory")->default_val(".");
ai_cmd->add_option("--prompt", g_kbot_opts.prompt, "The prompt. Supports file paths and vars.");
ai_cmd->add_option("-c,--config", g_kbot_opts.config_path, "Config file for API Keys")->default_val("config/postgres.toml");
ai_cmd->add_option("--output", g_kbot_opts.output, "Optional output path for modified files");
ai_cmd->add_option("--dst", g_kbot_opts.dst, "Optional destination path for the result");
/* Same destination as TS `onCompletion`: write LLM text here (--dst wins if both appear in IPC JSON). */
ai_cmd->add_option(
"--dst,--output",
g_kbot_opts.dst,
"Write completion result to this path (${MODEL}, ${ROUTER} expanded). Same as --output.");
ai_cmd->add_option("--append", g_kbot_opts.append, "How to handle output if --dst exists: concat|merge|replace")->default_val("concat");
ai_cmd->add_option("--wrap", g_kbot_opts.wrap, "Specify how to wrap output: meta|none")->default_val("none");
ai_cmd->add_option("--each", g_kbot_opts.each, "Iterate over items (GLOB, JSON, array)");
@ -146,6 +149,12 @@ int run_kbot_ai_ipc(const std::string& payload, const std::string& jobId, const
opts.response_format_json = rf.GetString();
}
}
if (doc.HasMember("dst") && doc["dst"].IsString())
opts.dst = doc["dst"].GetString();
if (doc.HasMember("output") && doc["output"].IsString()) {
if (opts.dst.empty())
opts.dst = doc["output"].GetString();
}
}
if (opts.api_key.empty()) {