media server 2/3 - stb

This commit is contained in:
lovebird 2026-04-12 22:57:40 +02:00
parent 71fd96e543
commit 148b0f6e57
140 changed files with 3274 additions and 773 deletions

View File

@ -1,37 +1,6 @@
# Build output
/build/
# Compiled objects
*.o
*.obj
*.exe
*.out
*.app
# CMake generated
CMakeCache.txt
CMakeFiles/
cmake_install.cmake
Makefile
# IDE / Editor
.vscode/
.idea/
*.swp
*.swo
*~
.env*
# OS
.DS_Store
Thumbs.db
# Logs
*.log
cache/
config/postgres.toml
dist
# Orchestrator reports (cwd/tests/*)
tests/*.json
tests/*.md
src/cmd_grid*.cpp
build/
dist/
.cache/
CMakeUserPresets.json
.vs/
*.user

View File

@ -1,9 +1,9 @@
cmake_minimum_required(VERSION 3.20)
project(kbot-cli
project(media-image-service
VERSION 0.1.0
DESCRIPTION "KBot C++ CLI"
LANGUAGES CXX C
DESCRIPTION "Polymech image resize service (CLI, REST, IPC)"
LANGUAGES CXX
)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_SOURCE_DIR}/dist")
@ -12,90 +12,52 @@ set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE "${CMAKE_SOURCE_DIR}/dist")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO "${CMAKE_SOURCE_DIR}/dist")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_MINSIZEREL "${CMAKE_SOURCE_DIR}/dist")
# C++ standard
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
# Dependencies
include(FetchContent)
set(JSON_BuildTests OFF CACHE BOOL "" FORCE)
FetchContent_Declare(
cli11
GIT_REPOSITORY https://github.com/CLIUtils/CLI11.git
GIT_TAG v2.4.2
GIT_SHALLOW TRUE
)
FetchContent_Declare(
tomlplusplus
GIT_REPOSITORY https://github.com/marzer/tomlplusplus.git
GIT_TAG v3.4.0
GIT_SHALLOW TRUE
)
FetchContent_Declare(
Catch2
GIT_REPOSITORY https://github.com/catchorg/Catch2.git
GIT_TAG v3.7.1
GIT_SHALLOW TRUE
GIT_TAG v2.4.2
GIT_SHALLOW TRUE
)
FetchContent_Declare(
asio
GIT_REPOSITORY https://github.com/chriskohlhoff/asio.git
GIT_TAG asio-1-28-0
GIT_SHALLOW TRUE
)
FetchContent_Declare(
concurrentqueue
GIT_REPOSITORY https://github.com/cameron314/concurrentqueue.git
GIT_TAG v1.0.4
GIT_SHALLOW TRUE
)
FetchContent_Declare(
taskflow
GIT_REPOSITORY https://github.com/taskflow/taskflow.git
GIT_TAG v3.6.0
GIT_SHALLOW TRUE
GIT_TAG asio-1-28-0
GIT_SHALLOW TRUE
)
FetchContent_Declare(
nlohmann_json
GIT_REPOSITORY https://github.com/nlohmann/json.git
GIT_TAG v3.11.3
GIT_SHALLOW TRUE
GIT_TAG v3.11.3
GIT_SHALLOW TRUE
)
FetchContent_Declare(
liboai
GIT_REPOSITORY https://github.com/jasonduncan/liboai.git
GIT_TAG main
GIT_SHALLOW TRUE
SOURCE_SUBDIR liboai
cpp_httplib
GIT_REPOSITORY https://github.com/yhirose/cpp-httplib.git
GIT_TAG v0.16.3
GIT_SHALLOW TRUE
)
# p-ranav/glob Unix-style glob / rglob (C++17); avoid upstream CMake (CPM + gtest).
FetchContent_Declare(
pranav_glob
GIT_REPOSITORY https://github.com/p-ranav/glob.git
GIT_TAG master
GIT_SHALLOW TRUE
stb
GIT_REPOSITORY https://github.com/nothings/stb.git
GIT_TAG master
GIT_SHALLOW TRUE
)
FetchContent_GetProperties(pranav_glob)
if(NOT pranav_glob_POPULATED)
FetchContent_Populate(pranav_glob)
endif()
add_library(pranav_glob STATIC ${pranav_glob_SOURCE_DIR}/source/glob.cpp)
target_include_directories(pranav_glob PUBLIC ${pranav_glob_SOURCE_DIR}/include)
target_compile_features(pranav_glob PUBLIC cxx_std_17)
if(MSVC)
target_compile_options(pranav_glob PRIVATE /permissive-)
endif()
# laserpants/dotenv-cpp load .env into the process environment (header-only).
FetchContent_MakeAvailable(cli11 asio nlohmann_json cpp_httplib)
# laserpants/dotenv-cpp load .env (same pattern as packages/kbot/cpp).
FetchContent_Declare(
laserpants_dotenv
GIT_REPOSITORY https://github.com/laserpants/dotenv-cpp.git
@ -110,68 +72,51 @@ add_library(laserpants_dotenv INTERFACE)
target_include_directories(laserpants_dotenv INTERFACE ${laserpants_dotenv_SOURCE_DIR}/include)
add_library(laserpants::dotenv ALIAS laserpants_dotenv)
set(TF_BUILD_TESTS OFF CACHE BOOL "" FORCE)
set(TF_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE)
set(JSON_BuildTests OFF CACHE BOOL "" FORCE)
FetchContent_GetProperties(stb)
if(NOT stb_POPULATED)
FetchContent_Populate(stb)
endif()
FetchContent_MakeAvailable(cli11 tomlplusplus Catch2 asio concurrentqueue taskflow nlohmann_json)
# Packages
add_subdirectory(packages/logger)
add_subdirectory(packages/html)
add_subdirectory(packages/postgres)
add_subdirectory(packages/http)
add_subdirectory(packages/json)
add_subdirectory(packages/polymech)
add_subdirectory(packages/ipc)
add_subdirectory(packages/liboai/liboai)
add_library(stb_headers INTERFACE)
target_include_directories(stb_headers INTERFACE ${stb_SOURCE_DIR})
add_subdirectory(packages/kbot)
# Sources
add_executable(${PROJECT_NAME}
add_executable(media-img
src/main.cpp
src/cmd_kbot.cpp
src/cmd_kbot_uds.cpp
src/sys_metrics.cpp
src/core/resize.cpp
src/http/serve.cpp
src/ipc/ipc_serve.cpp
)
# Output file name is kbot.exe / kbot (not kbot-cli)
set_target_properties(${PROJECT_NAME} PROPERTIES OUTPUT_NAME "kbot")
target_link_libraries(${PROJECT_NAME} PRIVATE CLI11::CLI11 tomlplusplus::tomlplusplus logger html postgres http json polymech ipc kbot laserpants::dotenv)
target_include_directories(${PROJECT_NAME} PRIVATE
target_include_directories(media-img PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}/src
${asio_SOURCE_DIR}/asio/include
${taskflow_SOURCE_DIR}
${concurrentqueue_SOURCE_DIR}
)
# Define standalone ASIO (since it's not boost)
target_compile_definitions(media-img PRIVATE
ASIO_STANDALONE
ASIO_NO_DEPRECATED
CPPHTTPLIB_NO_EXCEPTIONS=0
)
if(WIN32)
# Enable math constants like M_PI
add_compile_definitions(_USE_MATH_DEFINES)
add_compile_definitions(NOMINMAX)
endif()
target_compile_definitions(${PROJECT_NAME} PRIVATE ASIO_STANDALONE=1 ASIO_NO_DEPRECATED=1)
# Compiler warnings
if(MSVC)
target_compile_options(${PROJECT_NAME} PRIVATE /W4 /permissive-)
else()
target_compile_options(${PROJECT_NAME} PRIVATE -Wall -Wextra -Wpedantic)
target_compile_definitions(media-img PRIVATE
_WIN32_WINNT=0x0A00
NOMINMAX
)
endif()
# Install
# Library + headers: see packages/kbot/CMakeLists.txt and packages/ipc/CMakeLists.txt
# Optional DLL/so: configure with -DIPC_BUILD_SHARED=ON -DPOLYMECH_KBOT_SHARED=ON
install(TARGETS ${PROJECT_NAME}
RUNTIME DESTINATION bin
)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/src/cmd_kbot.h
DESTINATION include/polymech
target_link_libraries(media-img PRIVATE
CLI11::CLI11
nlohmann_json::nlohmann_json
httplib::httplib
laserpants::dotenv
stb_headers
)
# Tests
enable_testing()
add_subdirectory(tests)
if(UNIX AND NOT APPLE)
target_link_libraries(media-img PRIVATE pthread)
endif()
target_compile_definitions(media-img PRIVATE
"MEDIA_IMG_VERSION=\"${PROJECT_VERSION}\""
)

View File

@ -21,16 +21,6 @@
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release"
}
},
{
"name": "dev-dll",
"displayName": "Dev (Debug, ipc + kbot as DLL)",
"binaryDir": "${sourceDir}/build/dev-dll",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Debug",
"IPC_BUILD_SHARED": "ON",
"POLYMECH_KBOT_SHARED": "ON"
}
}
],
"buildPresets": [
@ -41,10 +31,6 @@
{
"name": "release",
"configurePreset": "release"
},
{
"name": "dev-dll",
"configurePreset": "dev-dll"
}
]
}
}

View File

@ -1,6 +1,33 @@
# kbot (C++)
# media-img (C++)
CMake-based C++ toolchain for **kbot**: HTML/HTTP/JSON utilities, **length-prefixed JSON IPC**, optional **UDS/TCP worker** for Node orchestrators, and **LLM chat** via liboai (OpenRouter, OpenAI, Ollama-compatible servers, etc.). The main binary is **`kbot`** (`kbot.exe` on Windows).
CMake-based **`media-img`** binary: **CLI resize**, **HTTP REST** (`serve`), and **line-delimited JSON IPC** (`ipc`) over **TCP** (all platforms) or **Unix domain sockets** (non-Windows). Loads optional **`.env`** from the working directory via [laserpants/dotenv-cpp](https://github.com/laserpants/dotenv-cpp) (same idea as kbot).
## Image stack (not libvips)
Processing uses **stb** ([nothings/stb](https://github.com/nothings/stb)): `stb_image`, **stb_image_resize2** (`stbir_*`), and `stb_image_write`. There is **no** [libvips](https://www.libvips.org/) / glib dependency in this package.
## Concurrency and defaults
| Area | Behavior |
|------|----------|
| **HTTP `serve`** | Uses **cpp-httplib**'s default **thread pool**: `CPPHTTPLIB_THREAD_POOL_COUNT` is **`max(8, hardware_concurrency() - 1)`** when hardware concurrency is known (see `httplib.h` in the fetched dependency). There is **no** extra app-level job queue or `MEDIA_*` concurrency cap yet—each accepted request runs resize on the pool. |
| **IPC `ipc`** | **One JSON line per accepted connection**; the handler replies with one line and returns (sequential per socket). For parallel work, open **multiple connections** or run multiple processes. |
| **CLI `resize`** | Single invocation, single file pair. |
**CLI defaults** (see `src/main.cpp`):
- `serve`: `--host 127.0.0.1`, `--port 8080`
- `ipc`: `--host 127.0.0.1`, `--port 9333`, or `--unix <path>` (Unix only)
**REST**
- `GET /health` → `{"ok":true,"service":"media-img"}`
- `POST /v1/resize` — JSON body: `input`, `output` (paths), optional `max_width`, `max_height`, `format` (`png` / `jpg` / `jpeg`)
**IPC** — one line per request, newline-terminated JSON:
- Success: `{"ok":true}`
- Failure: `{"ok":false,"error":"..."}`
## Prerequisites
@ -9,258 +36,86 @@ CMake-based C++ toolchain for **kbot**: HTML/HTTP/JSON utilities, **length-prefi
| CMake | ≥ 3.20 |
| C++ compiler | C++17 (MSVC, GCC, Clang) |
| Git | For `FetchContent` dependencies |
| Node.js | Optional; for `orchestrator/` IPC integration tests (`npm run test:ipc`) |
On Windows, use a **Developer Command Prompt** or **PowerShell** with MSVC in `PATH`. **Git Bash** helps if you use shell scripts under `scripts/`.
| Node.js | Optional; for integration tests (`npm run test:media`) — **Node 18+** recommended (`fetch`, `AbortSignal.timeout`) |
## Quick start (build)
From this directory (`packages/kbot/cpp`):
From `packages/media/cpp`:
```bash
npm install # optional; only needed if you use npm scripts
npm run build
npm install # optional if you only use cmake
npm run build:release
```
Artifacts go to **`dist/`** (e.g. `dist/kbot.exe`, test tools).
Equivalent CMake:
Artifacts: **`dist/media-img`** (or **`dist/media-img.exe`** on Windows).
```bash
cmake --preset dev
cmake --build --preset dev
cmake --preset release
cmake --build --preset release
```
### Presets
| Preset | Role |
|--------|------|
| `dev` | Debug, static `ipc` + `kbot` libraries (default) |
| `dev` | Debug build |
| `release` | Release build |
| `dev-dll` | Debug with **`ipc.dll`** and **`kbot.dll`** (`IPC_BUILD_SHARED=ON`, `POLYMECH_KBOT_SHARED=ON`) |
```bash
cmake --preset dev-dll
cmake --build --preset dev-dll --config Debug
```
Place **`ipc.dll`** and **`kbot.dll`** next to **`kbot.exe`** (or on `PATH`) when using the DLL configuration.
### npm scripts (reference)
| Script | Purpose |
|--------|---------|
| `npm run build` | Configure `dev` + build |
| `npm run build:release` | Release preset |
| `npm run test` | `ctest` in `build/dev` |
| `npm run clean` | Remove `build/` and `dist/` |
| `npm run test:ipc` | Node UDS IPC integration test |
| `npm run worker` | Run worker (stdio IPC) |
## Installation
Install the CLI and headers into a prefix (e.g. local tree or system root):
```bash
cmake --install build/dev --prefix "C:/path/to/install"
```
This installs:
- **`bin/kbot`** (runtime)
- **`include/polymech/`** — `kbot.h`, `llm_client.h`, `polymech_export.h`, `cmd_kbot.h`
- **`include/ipc/`** — `ipc.h`, `ipc_export.h`
- **`lib/`** — import libraries / archives (depending on static vs shared)
Library layout is defined in `packages/kbot/CMakeLists.txt` and `packages/ipc/CMakeLists.txt`.
### CMake options (libraries)
| Cache variable | Effect |
|----------------|--------|
| `IPC_BUILD_SHARED` | Build **`ipc`** as a shared library (`OFF` default) |
| `POLYMECH_KBOT_SHARED` | Build **`kbot`** as a shared library (`OFF` default) |
Static builds define `IPC_STATIC_BUILD` / `POLYMECH_STATIC_BUILD` for consumers via `INTERFACE` compile definitions. Shared builds export **`IPC_API`** / **`POLYMECH_API`** (see `ipc_export.h`, `polymech_export.h`).
## CLI overview
Top-level:
```bash
kbot --help
kbot -v,--version
kbot --log-level debug|info|warn|error
media-img --help
media-img -v,--version
```
### Subcommands
| Command | Description |
|---------|-------------|
| `parse <html>` | Parse HTML and list elements |
| `select <html> <selector>` | CSS-select elements |
| `config <file>` | Load and print a TOML file |
| `fetch <url>` | HTTP GET |
| `json <input>` | Prettify JSON |
| `db [-c config] [table] [-l limit]` | Supabase / DB helper (uses `config/postgres.toml` by default) |
| `worker [--uds <arg>]` | IPC worker (see below) |
| `kbot ai ...` / `kbot run ...` | AI and run pipelines (`setup_cmd_kbot` — use `kbot kbot ai --help`) |
### Worker mode (`kbot worker`)
Used by orchestrators and tests.
- **Stdio IPC** (length-prefixed JSON frames on stdin/stdout):
```bash
kbot worker
```
- **UDS / TCP** (Windows: TCP port string, e.g. `4001`; Unix: socket path):
```bash
kbot worker --uds 4001
```
Framing: `[uint32 LE length][UTF-8 JSON object with id, type, payload]`. Message types include `ping`, `job`, `kbot-ai`, `kbot-run`, `shutdown`, etc. See `src/main.cpp` and `orchestrator/test-ipc.mjs`.
### `kbot kbot` (nested)
CLI for AI tasks and run configurations:
```bash
kbot kbot ai --help
kbot kbot run --help
```
| Subcommand | Description |
|------------|-------------|
| `resize <input> <output>` | Resize file on disk (`--max-width`, `--max-height`, `--format`) |
| `serve` | HTTP server (`--host`, `-p/--port`) |
| `ipc` | JSON-line IPC: TCP (`--host`, `--port`) or **`--unix`** (Unix only) |
| `kbot ...` | Forwards extra arguments to **`KBOT_EXE`** (optional AI / kbot workflows; build kbot separately) |
Example:
```bash
kbot kbot ai --prompt "Hello" --config config/postgres.toml
set KBOT_EXE=C:\path\to\kbot.exe
media-img kbot ai --prompt "hello"
```
API keys are typically resolved from **`config/postgres.toml`** (`[services]`).
## Integration tests (REST + IPC)
## Using in other CMake projects
Tests mirror the style of **kbot** orchestrators (e.g. spawn binary, wait for listen, talk over TCP): see **`orchestrator/test-media.mjs`**. They cover:
There is no single `find_package(kbot)` config yet. Practical options:
### 1. Same repository / superbuild (recommended)
Add this repos `cpp` tree as a subdirectory from a parent `CMakeLists.txt` so `FetchContent` and internal targets (`logger`, `json`, `ipc`, `oai`, `kbot`, …) resolve once. Then:
```cmake
target_link_libraries(your_app PRIVATE ipc kbot)
```
`kbot` pulls in `logger`, `json`, `liboai` (`oai`) per `packages/kbot/CMakeLists.txt`.
### 2. Install prefix + explicit `IMPORTED` libraries
After `cmake --install`, link import libraries under `lib/` and add `include/` for **`ipc`** and **`polymech`**. You must still satisfy **transitive** dependencies (`oai`, `logger`, `json`, …) from the **same** build/install of this project, or duplicate their build—usually easier to use option 1.
### 3. Minimal example: IPC framing only
If you only need **`ipc::encode` / `ipc::decode`** (and can build `logger` + `json` the same way this project does), mirror `packages/ipc/CMakeLists.txt`:
```cmake
cmake_minimum_required(VERSION 3.20)
project(myapp CXX)
set(CMAKE_CXX_STANDARD 17)
add_subdirectory(path/to/polymech-mono/packages/kbot/cpp/packages/logger)
add_subdirectory(path/to/polymech-mono/packages/kbot/cpp/packages/json)
add_subdirectory(path/to/polymech-mono/packages/kbot/cpp/packages/ipc)
add_executable(myapp main.cpp)
target_link_libraries(myapp PRIVATE ipc)
```
**`main.cpp`** (stdio-style framing helpers):
```cpp
#include <iostream>
#include <ipc/ipc.h>
int main() {
ipc::Message msg{"1", "ping", "{}"};
auto frame = ipc::encode(msg);
// frame: 4-byte LE length + JSON object bytes
ipc::Message roundtrip;
if (frame.size() > 4 &&
ipc::decode(frame.data() + 4, frame.size() - 4, roundtrip)) {
std::cout << roundtrip.type << "\n"; // ping
}
return 0;
}
```
### 4. Example: LLM pipeline API (`kbot` library)
Headers: `kbot.h`, `llm_client.h`, `polymech_export.h`. You need a valid API key and options (see `KBotOptions` in `kbot.h`).
```cpp
#include <iostream>
#include "kbot.h"
#include "llm_client.h"
int main() {
polymech::kbot::KBotOptions opts;
opts.prompt = "Say hello in one sentence.";
opts.api_key = "YOUR_KEY";
opts.router = "openrouter";
opts.model = "openai/gpt-4o-mini";
polymech::kbot::LLMClient client(opts);
polymech::kbot::LLMResponse r = client.execute_chat(opts.prompt);
if (r.success) {
std::cout << r.text << "\n";
} else {
std::cerr << r.error << "\n";
return 1;
}
return 0;
}
```
Or use the callback-based pipeline:
```cpp
polymech::kbot::KBotCallbacks cb;
cb.onEvent = [](const std::string& type, const std::string& json) {
std::cout << type << ": " << json << "\n";
};
return polymech::kbot::run_kbot_ai_pipeline(opts, cb);
```
Link **`kbot`** (and its public dependencies). **`cmd_kbot.h`** entry points (`run_kbot_ai_ipc`, `run_cmd_kbot_uds`, …) are implemented in **`src/cmd_kbot*.cpp`** in this project; to reuse them, compile those sources into your binary or vendor the logic.
## Node / IPC tests
Integration tests live under **`orchestrator/`** (see comments in `orchestrator/test-ipc.mjs`). Typical run from `cpp/`:
- **REST**: `GET /health`, `POST /v1/resize` (PNG + JPEG output), error path for missing input
- **IPC TCP**: line JSON request/response, error path
- **IPC Unix**: same protocol on a **Unix socket** (skipped on Windows — use TCP there)
```bash
npm run test:ipc
npm run build:release
npm run generate:assets # if tests/assets PNGs are missing
npm run test:media
```
Classifier batch (semantic distances vs JobViewer labels):
| Script | Purpose |
|--------|---------|
| `npm run test:media` | REST + IPC (TCP + Unix where supported) |
| `npm run test:media:rest` | `--rest-only` |
| `npm run test:media:ipc` | `--ipc-only` |
```bash
npm run test:ipc:classifier
npm run test:ipc:classifier:openrouter
```
Env: **`MEDIA_IMG_TEST_UNIX`** — Unix socket path for the UDS test (default `/tmp/media-img-test.sock`).
Stress: repeat the **same** batched `kbot-ai` call **N** times on **one** worker; prints per-run wall time, token usage (when present), then **min / max / avg / p50 / p95** and Σ tokens. Default **N = 5** for the OpenRouter stress script:
## Test fixtures
```bash
npm run test:ipc:classifier:openrouter:stress
npm run test:ipc:classifier -- -r openrouter -m openai/gpt-4o-mini --backend remote -n 3
KBOT_CLASSIFIER_STRESS_RUNS=10 npm run test:ipc:classifier:openrouter:stress
```
Under **`tests/assets/`**:
Requires a built **`dist/kbot.exe`** (or `kbot` on Unix). Set API keys via `config/postgres.toml` for OpenRouter.
- **`build-fixtures.mjs`** — Generates RGB PNGs (no extra npm deps). Run: `node tests/assets/build-fixtures.mjs` (also exposed as **`npm run generate:assets`**).
- Additional subfolders (`in/`, `out/`, `*_webp/`, watermark samples, etc.) are available for broader manual or future tests.
## Related docs
- **`ROLLOUT.md`** — Phased rollout notes.
- **`polymech.md`** — Broader Polymech context (if present).
## License
See [LICENSE](LICENSE) in this directory.
See [LICENSE](LICENSE) in this directory when present.

View File

@ -0,0 +1,50 @@
# Media image service (C++) — rollout plan
## Goals
- **CLI**: resize files on disk (batch-friendly, scripts).
- **REST**: HTTP server for resize jobs (Node or other clients).
- **IPC**: async socket server — **Unix domain socket** on Linux/macOS; **TCP loopback** on Windows (Asio does not ship portable UDS on Windows; optional **named pipe** phase later).
## Dependencies (CMake / FetchContent)
| Component | Choice | Notes |
|-----------|--------|--------|
| CLI | [CLI11](https://github.com/CLIUtils/CLI11) | Same pattern as `kbot/cpp`. |
| Async I/O | [Asio](https://think-async.com/Asio/) (standalone) | UDS + accept loop; no Boost linkage. |
| HTTP | [cpp-httplib](https://github.com/yhirose/cpp-httplib) | Header-only REST; good for a dedicated worker. |
| JSON | [nlohmann/json](https://github.com/nlohmann/json) | Request/response bodies. |
| Images (v1) | [stb](https://github.com/nothings/stb) | No system install; PNG/JPEG in-tree. |
| Images (later) | **libvips** | Optional `find_package` / vcpkg when you need parity with Sharp speed/quality. |
## Phases
### Phase 0 — Scaffold (this PR)
- CMake presets `dev` / `release`, output `dist/media-img(.exe)`.
- `npm run build:release` green on Windows MSVC.
### Phase 1 — Core + CLI
- `resize` command: input path, output path, max width/height, format (png/jpeg).
- Single-threaded; deterministic errors to stderr.
### Phase 2 — REST
- `serve --bind --port`: `GET /health`, `POST /v1/resize` (JSON with paths or raw body + query params — v1 uses file paths for simplicity).
### Phase 3 — IPC
- `ipc --listen <path|host:port>`: line-delimited or length-prefixed JSON requests (documented in `docs/ipc-protocol.md` stub).
- Linux: Unix socket. Windows: TCP `127.0.0.1:<port>` (or named pipe in a follow-up).
### Phase 4 — Production hardening
- Optional libvips backend behind `MEDIA_USE_VIPS`.
- Worker pool, request limits, metrics.
- CI: Linux + Windows matrix.
## npm scripts
- `npm run build:release` — configure + build Release.
- `npm run run` → `dist/media-img --help`.

View File

@ -1,112 +0,0 @@
{
"items": [
{
"label": "3D printing service",
"distance": 6.0
},
{
"label": "Drafting service",
"distance": 7.0
},
{
"label": "Engraver",
"distance": 6.5
},
{
"label": "Furniture maker",
"distance": 7.5
},
{
"label": "Industrial engineer",
"distance": 7.0
},
{
"label": "Industrial equipment supplier",
"distance": 5.5
},
{
"label": "Laser cutting service",
"distance": 4.5
},
{
"label": "Machine construction",
"distance": 3.0
},
{
"label": "Machine repair service",
"distance": 2.5
},
{
"label": "Machine shop",
"distance": 0.2
},
{
"label": "Machine workshop",
"distance": 0.0
},
{
"label": "Machinery parts manufacturer",
"distance": 2.0
},
{
"label": "Machining manufacturer",
"distance": 1.5
},
{
"label": "Manufacturer",
"distance": 6.0
},
{
"label": "Mechanic",
"distance": 5.0
},
{
"label": "Mechanical engineer",
"distance": 6.5
},
{
"label": "Mechanical plant",
"distance": 3.5
},
{
"label": "Metal fabricator",
"distance": 2.0
},
{
"label": "Metal heat treating service",
"distance": 3.5
},
{
"label": "Metal machinery supplier",
"distance": 5.0
},
{
"label": "Metal working shop",
"distance": 1.0
},
{
"label": "Metal workshop",
"distance": 1.2
},
{
"label": "Novelty store",
"distance": 10.0
},
{
"label": "Plywood supplier",
"distance": 9.5
},
{
"label": "Sign shop",
"distance": 7.5
},
{
"label": "Tool manufacturer",
"distance": 3.0
},
{
"label": "Trophy shop",
"distance": 8.0
}
]
}

View File

@ -0,0 +1,52 @@
# Polymech Postgres Configuration
# Derived from pm-pics/server/.env
# SECURITY(review): live credentials were committed here. Rotate every secret
# below and load real values from the environment / an untracked .env instead
# of checking them into version control.
[postgres]
url = "postgresql://postgres.<PROJECT_REF>:<DB_PASSWORD>@aws-1-eu-north-1.pooler.supabase.com:6543/postgres?pgbouncer=true"
[supabase]
url = "https://<PROJECT_REF>.supabase.co"
service_key = "<SUPABASE_SERVICE_KEY>"
publishable_key = "<SUPABASE_PUBLISHABLE_KEY>"
[server]
port = 3333
url = "http://localhost:3333"
image_api_url = "http://localhost:3333"
[server.remote]
url = "https://service.polymech.info"
image_api_url = "https://service.polymech.info"
# -----------------------------------------------------------------------------
# Services
# -----------------------------------------------------------------------------
[services]
# SECURITY(review): the API keys previously on these lines were leaked in
# version control — rotate them and inject real values via environment
# variables or an untracked secrets file.
SERPAPI_KEY="<SERPAPI_KEY>"
GEO_CODER_KEY="<GEO_CODER_KEY>"
BIG_DATA_KEY="<BIG_DATA_KEY>"
SCRAPELESS_KEY="<SCRAPELESS_KEY>"
OPENROUTER="<OPENROUTER_API_KEY>"
# -----------------------------------------------------------------------------
# Enricher Configuration
# -----------------------------------------------------------------------------
[enricher]
ENRICHER_LOCATION_CONCURRENCY=10
ENRICHER_META_CONCURRENCY=5
ENRICHER_META_IDLE_TIMEOUT=60
ENRICHER_META_SCRAPER="HTTP"
# -----------------------------------------------------------------------------
# System Hard Constraints & Throttle Limits (IPC / Threads / Buffers)
# -----------------------------------------------------------------------------
[system]
executor_threads = 12
http_concurrency_throttle = 15
queue_depth_max = 200
buffer_size_max = 52428800
[cache]
GADM_CACHE_DEV = "../../packages/gadm/cache/gadm"
GADM_CACHE_PROD = "../../../geo-cache/gadm/"

View File

@ -0,0 +1,70 @@
/**
* Line-delimited JSON over TCP or Unix stream (media-img `ipc` mode).
* One request per connection (server closes after one response).
*/
import net from 'node:net';
/**
 * Send one JSON request as a single newline-terminated line and resolve with
 * the first newline-terminated JSON line read back (media-img `ipc` protocol:
 * one request/response per connection; the server closes after replying).
 *
 * Fixes over the previous version:
 * - bytes are accumulated as a Buffer and decoded only once a full line is
 *   present, so a multi-byte UTF-8 sequence split across 'data' chunks can
 *   no longer be corrupted by per-chunk decoding;
 * - a 'close' before the newline rejects immediately instead of hanging
 *   until the read timeout fires.
 *
 * @param {import('node:net').Socket} socket connected stream socket
 * @param {Record<string, unknown>} payload request object (sent as one JSON line)
 * @param {number} [timeoutMs] reject if no complete line arrives in time
 * @returns {Promise<unknown>} parsed JSON response object
 */
export function requestLineJson(socket, payload, timeoutMs = 10_000) {
  return new Promise((resolve, reject) => {
    let buf = Buffer.alloc(0);
    const timer = setTimeout(() => {
      cleanup();
      reject(new Error('IPC line read timeout'));
    }, timeoutMs);
    function cleanup() {
      clearTimeout(timer);
      socket.removeListener('data', onData);
      socket.removeListener('error', onErr);
      socket.removeListener('close', onClose);
    }
    function onData(chunk) {
      // Keep raw bytes; decode only once a complete line (0x0a = '\n') exists.
      buf = Buffer.concat([buf, Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)]);
      const lineEnd = buf.indexOf(0x0a);
      if (lineEnd >= 0) {
        cleanup();
        const line = buf.subarray(0, lineEnd).toString('utf8');
        try {
          resolve(JSON.parse(line));
        } catch (e) {
          reject(e);
        }
      }
    }
    function onErr(e) {
      cleanup();
      reject(e);
    }
    function onClose() {
      // Server hung up without a full response line — fail fast.
      cleanup();
      reject(new Error('IPC socket closed before a response line arrived'));
    }
    socket.on('data', onData);
    socket.once('error', onErr);
    socket.once('close', onClose);
    socket.write(`${JSON.stringify(payload)}\n`);
  });
}
/**
 * Open a TCP connection and resolve with the connected socket.
 * Rejects on the first connection error.
 * @param {string} host
 * @param {number} port
 * @returns {Promise<import('node:net').Socket>}
 */
export function connectTcp(host, port) {
  return new Promise((resolve, reject) => {
    const socket = net.connect({ host, port });
    socket.once('error', reject);
    socket.once('connect', () => resolve(socket));
  });
}
/**
 * Open a Unix-domain-socket connection and resolve with the connected socket.
 * Rejects on the first connection error. Non-Windows only (callers gate this).
 * @param {string} path filesystem path of the listening socket
 * @returns {Promise<import('node:net').Socket>}
 */
export function connectUnix(path) {
  return new Promise((resolve, reject) => {
    const socket = net.connect(path);
    socket.once('error', reject);
    socket.once('connect', () => resolve(socket));
  });
}

View File

@ -0,0 +1,34 @@
/**
* Defaults for media-img orchestrator tests (REST + line IPC).
*/
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
const __dirname = dirname(fileURLToPath(import.meta.url));
/** Platform flags shared by the media-img test scripts. */
const winHost = process.platform === 'win32';
export const platform = {
  isWin: winHost,
};
/** Path to media-img binary next to orchestrator/ → ../dist/ */
export function mediaExePath(orchestratorDir = __dirname) {
  const exeName = `media-img${platform.isWin ? '.exe' : ''}`;
  return resolve(orchestratorDir, '..', 'dist', exeName);
}
/** Default fixtures directory: packages/media/cpp/tests/assets */
export function defaultAssetsDir(orchestratorDir = __dirname) {
  const packageRoot = resolve(orchestratorDir, '..');
  return resolve(packageRoot, 'tests', 'assets');
}
/** Shared timing knobs for the integration tests (probe counts and ms deadlines). */
export const timeouts = {
// number of listen/connect probes before giving up
connectAttempts: 20,
// delay between successive probes (ms)
connectRetryMs: 100,
// REST request deadline (ms)
httpMs: 15_000,
// IPC line-read deadline (ms)
ipcReadMs: 10_000,
};
/**
 * Unix socket path for IPC tests (non-Windows).
 * Honors MEDIA_IMG_TEST_UNIX when set (non-empty); otherwise uses the default.
 */
export function ipcUnixPath() {
  const override = process.env.MEDIA_IMG_TEST_UNIX;
  if (override) return override;
  return '/tmp/media-img-test.sock';
}

View File

@ -0,0 +1,283 @@
/**
* Integration tests: media-img REST (`serve`) and line IPC (`ipc`) TCP on all platforms;
* Unix domain socket on non-Windows (same JSON line protocol as TCP).
*
* Run (from packages/media/cpp, after build:release):
* npm run test:media
*
* npm run test:media -- --rest-only
* npm run test:media -- --ipc-only
*
* Fixtures: tests/assets (run `node tests/assets/build-fixtures.mjs` if missing).
*
* Env:
* MEDIA_IMG_TEST_UNIX Unix socket path for IPC UDS test (default /tmp/media-img-test.sock)
*/
import { spawn } from 'node:child_process';
import { existsSync, mkdtempSync, rmSync, unlinkSync } from 'node:fs';
import net from 'node:net';
import { tmpdir } from 'node:os';
import { dirname, join, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import {
mediaExePath,
defaultAssetsDir,
timeouts,
ipcUnixPath,
platform,
} from './media-presets.js';
import { requestLineJson, connectTcp, connectUnix } from './media-line-ipc.js';
import { probeTcpPort, createAssert, pipeWorkerStderr } from './test-commons.js';
// Location of this script; the binary under test lives next door in ../dist/.
const __dirname = dirname(fileURLToPath(import.meta.url));
const EXE = mediaExePath(__dirname);
// Shared assertion counter/reporter from test-commons.
const stats = createAssert();
const { assert } = stats;
// CLI switches: restrict the run to only the REST or only the IPC suites.
const restOnly = process.argv.includes('--rest-only');
const ipcOnly = process.argv.includes('--ipc-only');
/** Ask the OS for an ephemeral loopback port, then release it before resolving. */
function getFreePort() {
  return new Promise((resolvePort, reject) => {
    const probe = net.createServer();
    probe.once('error', reject);
    probe.listen(0, '127.0.0.1', () => {
      const { port } = probe.address();
      probe.close(() => resolvePort(port));
    });
  });
}
/**
 * Poll until something accepts TCP connections on host:port; throw after
 * timeouts.connectAttempts failed probes.
 * @param {string} host
 * @param {number} port
 * @param {string} label prefix for the failure message
 */
async function waitListen(host, port, label) {
  const delay = (ms) => new Promise((r) => setTimeout(r, ms));
  let attempt = 0;
  while (attempt < timeouts.connectAttempts) {
    if (await probeTcpPort(host, port, 300)) return;
    await delay(timeouts.connectRetryMs);
    attempt += 1;
  }
  throw new Error(`${label}: nothing listening on ${host}:${port}`);
}
/**
 * REST suite: spawn `media-img serve` on a free loopback port and exercise
 * GET /health, POST /v1/resize to PNG and JPEG, and the missing-input error
 * path (expected HTTP 500). Kills the server and removes temp output at the end.
 * @param {string} assetsDir directory containing the PNG fixtures
 */
async function suiteRest(assetsDir) {
console.log('\n── REST (media-img serve) ──\n');
const inPng = resolve(assetsDir, 'square-64.png');
assert(existsSync(inPng), `fixture ${inPng}`);
// Spawn the server on an OS-assigned free port; stderr is piped for diagnostics.
const port = await getFreePort();
const proc = spawn(EXE, ['serve', '--host', '127.0.0.1', '--port', String(port)], {
stdio: ['ignore', 'pipe', 'pipe'],
});
pipeWorkerStderr(proc, '[media-img:serve]');
try {
await waitListen('127.0.0.1', port, 'serve');
const base = `http://127.0.0.1:${port}`;
// Health endpoint must report ok:true and the service name.
const h = await fetch(`${base}/health`, { signal: AbortSignal.timeout(timeouts.httpMs) });
assert(h.ok, 'GET /health ok');
const hj = await h.json();
assert(hj?.ok === true && hj?.service === 'media-img', 'GET /health JSON');
// Resize to PNG with both max dimensions capped at 32; output lands in a tmp dir.
const outDir = mkdtempSync(join(tmpdir(), 'media-rest-'));
const outPng = join(outDir, 'out-32.png');
const r1 = await fetch(`${base}/v1/resize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
input: inPng,
output: outPng,
max_width: 32,
max_height: 32,
}),
signal: AbortSignal.timeout(timeouts.httpMs),
});
assert(r1.ok, 'POST /v1/resize ok');
const j1 = await r1.json();
assert(j1?.ok === true, 'resize response ok');
assert(existsSync(outPng), 'output png exists');
// Same input with an explicit format change (png source → jpeg output).
const outJpg = join(outDir, 'out.jpg');
const r2 = await fetch(`${base}/v1/resize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
input: inPng,
output: outJpg,
max_width: 48,
format: 'jpeg',
}),
signal: AbortSignal.timeout(timeouts.httpMs),
});
assert(r2.ok, 'POST /v1/resize jpeg');
assert(existsSync(outJpg), 'output jpg exists');
// Error path: a nonexistent input file must yield HTTP 500.
const bad = await fetch(`${base}/v1/resize`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ input: '/nope/nope.png', output: join(outDir, 'x.png') }),
signal: AbortSignal.timeout(timeouts.httpMs),
});
assert(bad.status === 500, 'POST /v1/resize missing file → 500');
rmSync(outDir, { recursive: true, force: true });
} finally {
// Always stop the server; brief pause lets the process exit before the next suite.
proc.kill();
await new Promise((r) => setTimeout(r, 150));
}
}
/**
 * IPC-over-TCP suite: spawn `media-img ipc` on a free loopback port, send one
 * JSON line per connection (success resize, then error path), and verify the
 * responses. Kills the worker and removes temp output at the end.
 * @param {string} assetsDir directory containing the PNG fixtures
 */
async function suiteIpcTcp(assetsDir) {
console.log('\n── IPC TCP (media-img ipc --host --port) ──\n');
const inPng = resolve(assetsDir, 'square-64.png');
assert(existsSync(inPng), `fixture ${inPng}`);
const port = await getFreePort();
const proc = spawn(EXE, ['ipc', '--host', '127.0.0.1', '--port', String(port)], {
stdio: ['ignore', 'pipe', 'pipe'],
});
pipeWorkerStderr(proc, '[media-img:ipc]');
try {
await waitListen('127.0.0.1', port, 'ipc');
const outDir = mkdtempSync(join(tmpdir(), 'media-ipc-tcp-'));
const outPng = join(outDir, 'ipc-out.png');
// One request per connection: open, send one line, read one line.
const sock = await connectTcp('127.0.0.1', port);
const res = await requestLineJson(
sock,
{
input: inPng,
output: outPng,
max_width: 32,
max_height: 32,
},
timeouts.ipcReadMs,
);
sock.destroy();
assert(res?.ok === true, 'IPC line JSON ok');
assert(existsSync(outPng), 'IPC output file exists');
// Error path needs a fresh connection (server replies once then closes).
const sock2 = await connectTcp('127.0.0.1', port);
const res2 = await requestLineJson(
sock2,
{ input: '/not/found.png', output: join(outDir, 'bad.png') },
timeouts.ipcReadMs,
);
sock2.destroy();
assert(res2?.ok === false, 'IPC error path ok=false');
rmSync(outDir, { recursive: true, force: true });
} finally {
// Always stop the worker; brief pause lets it exit before the next suite.
proc.kill();
await new Promise((r) => setTimeout(r, 150));
}
}
async function suiteIpcUnix(assetsDir) {
  console.log('\n── IPC Unix (media-img ipc --unix) ──\n');
  const sockPath = ipcUnixPath();
  // Remove a stale socket left over from a previous run, if any.
  if (existsSync(sockPath)) {
    try {
      unlinkSync(sockPath);
    } catch {
      /* ignore */
    }
  }
  const fixture = resolve(assetsDir, 'checker-128x128.png');
  assert(existsSync(fixture), `fixture ${fixture}`);
  const server = spawn(EXE, ['ipc', '--unix', sockPath], {
    stdio: ['ignore', 'pipe', 'pipe'],
  });
  pipeWorkerStderr(server, '[media-img:ipc:uds]');
  const scratch = mkdtempSync(join(tmpdir(), 'media-ipc-uds-'));
  const produced = join(scratch, 'uds-out.png');
  try {
    // Wait for the server to create its socket file.
    for (let attempt = 0; attempt < timeouts.connectAttempts; attempt++) {
      if (existsSync(sockPath)) break;
      await new Promise((r) => setTimeout(r, timeouts.connectRetryMs));
    }
    assert(existsSync(sockPath), 'unix socket path exists');
    // Connect with retries — the listener may lag behind socket creation.
    let conn;
    for (let attempt = 0; attempt < timeouts.connectAttempts; attempt++) {
      try {
        conn = await connectUnix(sockPath);
        break;
      } catch {
        if (attempt === timeouts.connectAttempts - 1) throw new Error('connect unix failed');
        await new Promise((r) => setTimeout(r, timeouts.connectRetryMs));
      }
    }
    const reply = await requestLineJson(
      conn,
      { input: fixture, output: produced, max_width: 64 },
      timeouts.ipcReadMs,
    );
    conn.destroy();
    assert(reply?.ok === true, 'UDS line JSON ok');
    assert(existsSync(produced), 'UDS output file exists');
  } finally {
    server.kill();
    try {
      if (existsSync(sockPath)) unlinkSync(sockPath);
    } catch {
      /* ignore */
    }
    rmSync(scratch, { recursive: true, force: true });
    await new Promise((r) => setTimeout(r, 150));
  }
}
async function run() {
  const assetsDir = resolve(defaultAssetsDir(__dirname));
  if (!existsSync(EXE)) {
    console.error(`Binary not found: ${EXE}`);
    process.exit(1);
  }
  // Both suites depend on these generated fixtures; fail fast with a hint.
  const required = ['square-64.png', 'checker-128x128.png'];
  const missing = required.filter((name) => !existsSync(join(assetsDir, name)));
  if (missing.length) {
    console.error(`Missing fixtures under ${assetsDir}: ${missing.join(', ')}`);
    console.error('Run: node tests/assets/build-fixtures.mjs');
    process.exit(1);
  }
  console.log(`\nmedia-img integration tests\n binary: ${EXE}\n assets: ${assetsDir}\n`);
  const wantRest = !ipcOnly;
  const wantIpc = !restOnly;
  if (wantRest) await suiteRest(assetsDir);
  if (wantIpc) {
    await suiteIpcTcp(assetsDir);
    if (platform.isWin) {
      console.log('\n── IPC Unix (media-img ipc --unix) ──\n');
      console.log(' (skipped on Windows — use TCP IPC or run tests on Linux/macOS)\n');
    } else {
      await suiteIpcUnix(assetsDir);
    }
  }
  console.log(`\nDone. Passed: ${stats.passed} Failed: ${stats.failed}\n`);
  process.exit(stats.failed > 0 ? 1 : 0);
}
// Top-level entry: any unhandled rejection is fatal for the test process.
run().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@ -1,41 +1,29 @@
{
"name": "kbot-cpp",
"version": "1.0.0",
"name": "media-cpp",
"version": "0.1.0",
"type": "module",
"description": "KBot C++ CLI built with CMake.",
"description": "media-img — image resize CLI, REST server, and JSON-line IPC (C++).",
"directories": {
"test": "tests"
},
"dependencies": {
"yargs": "^17.7.2"
},
"scripts": {
"config": "cmake --preset dev",
"config:release": "cmake --preset release",
"build": "cmake --preset dev && cmake --build --preset dev",
"build:release": "cmake --preset release && cmake --build --preset release",
"build:linux": "bash build-linux.sh",
"test": "ctest --test-dir build/dev -C Debug --output-on-failure",
"test:release": "ctest --test-dir build/release -C Release --output-on-failure",
"clean": "cmake -E rm -rf build dist",
"rebuild": "npm run clean && npm run build",
"run": ".\\dist\\kbot.exe --help",
"worker": ".\\dist\\kbot.exe worker",
"worker:uds": ".\\dist\\kbot.exe worker --uds \\\\.\\pipe\\kbot-worker",
"kbot:ai": ".\\dist\\kbot.exe kbot ai --prompt \"hi\"",
"kbot:run": ".\\dist\\kbot.exe kbot run --list",
"test:ipc": "node orchestrator/test-ipc.mjs",
"test:ipc:classifier": "node orchestrator/test-ipc-classifier.mjs",
"test:files": "node orchestrator/test-files.mjs",
"test:ipc:classifier:openrouter": "node orchestrator/classifier-openrouter.mjs",
"test:ipc:classifier:openrouter:stress": "node orchestrator/classifier-openrouter-stress.mjs",
"test:html": "cmake --preset release && cmake --build --preset release --target test_html && .\\dist\\test_html.exe"
},
"repository": {
"type": "git",
"url": "https://git.polymech.info/polymech/mono-cpp.git"
"generate:assets": "node tests/assets/build-fixtures.mjs",
"test:media": "node orchestrator/test-media.mjs",
"test:media:rest": "node orchestrator/test-media.mjs --rest-only",
"test:media:ipc": "node orchestrator/test-media.mjs --ipc-only",
"run": ".\\dist\\media-img.exe --help",
"resize": ".\\dist\\media-img.exe resize",
"serve": ".\\dist\\media-img.exe serve --help",
"ipc": ".\\dist\\media-img.exe ipc --help",
"kbot": ".\\dist\\media-img.exe kbot --help"
},
"keywords": [],
"author": "",
"license": "ISC"
}
}

View File

@ -0,0 +1,95 @@
#include "resize.hpp"
#include <algorithm>
#include <cstdio>
#include <cstring>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#define STB_IMAGE_WRITE_IMPLEMENTATION
#define STB_IMAGE_RESIZE_IMPLEMENTATION
#include "stb_image.h"
#include "stb_image_resize2.h"
#include "stb_image_write.h"
namespace media {
// Fit (iw, ih) inside the optional max_width/max_height box while keeping the
// aspect ratio. A non-positive limit means "unbounded"; images are never
// upscaled, and each output dimension is clamped to at least 1 pixel.
static void compute_target_size(int iw, int ih, const ResizeOptions& opt, int* ow, int* oh) {
  double factor = 1.0;
  if (opt.max_width > 0 && iw > opt.max_width)
    factor = std::min(factor, static_cast<double>(opt.max_width) / static_cast<double>(iw));
  if (opt.max_height > 0 && ih > opt.max_height)
    factor = std::min(factor, static_cast<double>(opt.max_height) / static_cast<double>(ih));
  if (factor < 1.0) {
    *ow = std::max(1, static_cast<int>(iw * factor));
    *oh = std::max(1, static_cast<int>(ih * factor));
  } else {
    *ow = iw;
    *oh = ih;
  }
}
// Load `input_path` with STB, shrink it to fit the ResizeOptions box, and
// write the result to `output_path` as PNG or JPEG (quality 90). Returns
// false and fills `err_out` on failure. Note: on success with an unknown
// format, `err_out` may still carry a non-fatal warning string.
bool resize_file(const std::string& input_path, const std::string& output_path,
                 const ResizeOptions& opt, std::string& err_out) {
  int src_w = 0, src_h = 0, comps = 0;
  unsigned char* pixels = stbi_load(input_path.c_str(), &src_w, &src_h, &comps, 0);
  if (pixels == nullptr) {
    err_out = std::string("stbi_load failed: ") + stbi_failure_reason();
    return false;
  }

  int dst_w = src_w, dst_h = src_h;
  compute_target_size(src_w, src_h, opt, &dst_w, &dst_h);

  // The output keeps the channel count of the source image.
  const int comps_out = comps;
  std::vector<unsigned char> resized(
      static_cast<size_t>(dst_w) * static_cast<size_t>(dst_h) * static_cast<size_t>(comps_out));

  // Map the channel count onto stb's pixel-layout enum (default RGBA).
  stbir_pixel_layout layout;
  switch (comps) {
    case 1:  layout = STBIR_1CHANNEL; break;
    case 2:  layout = STBIR_2CHANNEL; break;
    case 3:  layout = STBIR_RGB; break;
    default: layout = STBIR_RGBA; break;
  }

  if (!stbir_resize_uint8_srgb(pixels, src_w, src_h, 0, resized.data(), dst_w, dst_h, 0, layout)) {
    stbi_image_free(pixels);
    err_out = "stbir_resize_uint8_srgb failed";
    return false;
  }
  stbi_image_free(pixels);

  // Output encoding: explicit opt.format wins, otherwise the file extension.
  std::string fmt = opt.format;
  if (fmt.empty()) {
    const auto dot = output_path.rfind('.');
    if (dot != std::string::npos)
      fmt = output_path.substr(dot + 1);
  }
  for (auto& ch : fmt)
    if (ch >= 'A' && ch <= 'Z')
      ch = static_cast<char>(ch - 'A' + 'a');

  int wrote = 0;
  if (fmt == "jpg" || fmt == "jpeg") {
    wrote = stbi_write_jpg(output_path.c_str(), dst_w, dst_h, comps_out, resized.data(), 90);
  } else {
    if (fmt != "png" && !fmt.empty()) {
      err_out = "warning: unsupported format '" + fmt + "', writing PNG";
    }
    wrote = stbi_write_png(output_path.c_str(), dst_w, dst_h, comps_out, resized.data(), dst_w * comps_out);
  }
  if (!wrote) {
    err_out = "stbi_write failed";
    return false;
  }
  return true;
}
} // namespace media

View File

@ -0,0 +1,18 @@
#pragma once
#include <string>
namespace media {
/** Bounding-box limits and output format passed to resize_file(). */
struct ResizeOptions {
int max_width = 0; // 0 = no limit (respect max_height)
int max_height = 0; // 0 = no limit
/** "png", "jpg", "jpeg" — if empty, infer from output path extension */
std::string format;
};
/**
 * Resize image on disk. At least one of max_width / max_height should be > 0.
 *
 * @param input_path  source image path on local disk
 * @param output_path destination path; its extension selects the encoder
 *                    unless opt.format is set explicitly
 * @param opt         size limits and optional output format
 * @param err_out     filled with a human-readable message on failure
 * @return true on success, false on failure (see err_out)
 */
bool resize_file(const std::string& input_path, const std::string& output_path,
const ResizeOptions& opt, std::string& err_out);
} // namespace media

View File

@ -0,0 +1,63 @@
#include "serve.hpp"
#include <httplib.h>
#include <nlohmann/json.hpp>
#include <iostream>
#include "core/resize.hpp"
namespace media::http {
/**
 * Blocking HTTP server exposing:
 *   GET  /health     — liveness probe
 *   POST /v1/resize  — JSON body {input, output, max_width?, max_height?, format?}
 *
 * @param host bind address
 * @param port TCP port
 * @return 0 on clean shutdown, 1 if the listener could not be started
 */
int run_server(const std::string& host, int port) {
  httplib::Server svr;
  svr.Get("/health", [](const httplib::Request&, httplib::Response& res) {
    res.set_content(R"({"ok":true,"service":"media-img"})", "application/json");
  });
  svr.Post("/v1/resize", [](const httplib::Request& req, httplib::Response& res) {
    nlohmann::json body;
    try {
      body = nlohmann::json::parse(req.body.empty() ? "{}" : req.body);
    } catch (...) {
      res.status = 400;
      res.set_content(R"({"error":"invalid JSON"})", "application/json");
      return;
    }
    // Validate types up front: json::get<std::string>() on a non-string value
    // throws, which previously surfaced as a 500 instead of a client error.
    if (!body.contains("input") || !body.contains("output") ||
        !body["input"].is_string() || !body["output"].is_string()) {
      res.status = 400;
      res.set_content(R"({"error":"input and output paths required"})", "application/json");
      return;
    }
    const std::string in = body["input"].get<std::string>();
    const std::string out = body["output"].get<std::string>();
    media::ResizeOptions opt;
    // Non-integer limits are ignored rather than allowed to throw.
    if (body.contains("max_width") && body["max_width"].is_number_integer())
      opt.max_width = body["max_width"].get<int>();
    if (body.contains("max_height") && body["max_height"].is_number_integer())
      opt.max_height = body["max_height"].get<int>();
    if (body.contains("format") && body["format"].is_string())
      opt.format = body["format"].get<std::string>();
    std::string err;
    if (!media::resize_file(in, out, opt, err)) {
      res.status = 500;  // resize failed (e.g. unreadable input file)
      nlohmann::json j{{"error", err}};
      res.set_content(j.dump(), "application/json");
      return;
    }
    res.set_content(R"({"ok":true})", "application/json");
  });
  std::cerr << "media-img HTTP listening on http://" << host << ":" << port << "\n";
  if (!svr.listen(host.c_str(), port)) {
    std::cerr << "listen failed\n";
    return 1;
  }
  return 0;
}
} // namespace media::http

View File

@ -0,0 +1,10 @@
#pragma once
#include <string>
namespace media::http {
/**
 * Blocking HTTP server (cpp-httplib) bound to host:port, serving
 * GET /health and POST /v1/resize. Returns 0 on clean shutdown and
 * non-zero if the listener could not be started.
 */
int run_server(const std::string& host, int port);
} // namespace media::http

View File

@ -0,0 +1,105 @@
#include "ipc_serve.hpp"

#include <cstdio>
#include <iostream>

#include <asio.hpp>
#include <asio/read_until.hpp>
#include <asio/write.hpp>
#if !defined(_WIN32)
#include <asio/local/stream_protocol.hpp>
#include <unistd.h>
#endif
#include <nlohmann/json.hpp>

#include "core/resize.hpp"
namespace media::ipc {
// Serve exactly one request on `sock`: read one '\n'-terminated JSON line,
// run the resize, write one JSON line back. All failures are reported to the
// client as {"ok":false,...}; this function never lets an exception escape.
static int handle_session(asio::ip::tcp::socket sock) {
  try {
    asio::streambuf buf;
    asio::read_until(sock, buf, '\n');
    std::istream is(&buf);
    std::string line;
    std::getline(is, line);
    nlohmann::json j = nlohmann::json::parse(line, nullptr, false);
    if (!j.is_object()) {
      std::string err = R"({"ok":false,"error":"invalid json"})";
      asio::write(sock, asio::buffer(err + "\n"));
      return 0;
    }
    // Type-check as well as presence-check, so a non-string value produces a
    // clean protocol error instead of a json::type_error.
    if (!j.contains("input") || !j.contains("output") ||
        !j["input"].is_string() || !j["output"].is_string()) {
      std::string err = R"({"ok":false,"error":"need input and output"})";
      asio::write(sock, asio::buffer(err + "\n"));
      return 0;
    }
    media::ResizeOptions opt;
    if (j.contains("max_width"))
      opt.max_width = j["max_width"].get<int>();
    if (j.contains("max_height"))
      opt.max_height = j["max_height"].get<int>();
    if (j.contains("format") && j["format"].is_string())
      opt.format = j["format"].get<std::string>();
    std::string err;
    bool ok = media::resize_file(j["input"].get<std::string>(), j["output"].get<std::string>(), opt, err);
    nlohmann::json out = ok ? nlohmann::json{{"ok", true}} : nlohmann::json{{"ok", false}, {"error", err}};
    std::string payload = out.dump() + "\n";
    asio::write(sock, asio::buffer(payload));
  } catch (const std::exception& e) {
    try {
      // Build the reply via nlohmann so e.what() is JSON-escaped — the old
      // hand-built concatenation emitted invalid JSON when the message
      // contained quotes or backslashes.
      nlohmann::json out{{"ok", false}, {"error", e.what()}};
      asio::write(sock, asio::buffer(out.dump() + "\n"));
    } catch (...) {
    }
  }
  return 0;
}
int run_tcp_server(const std::string& host, int port) {
asio::io_context io;
asio::ip::tcp::acceptor acc(io, asio::ip::tcp::endpoint(asio::ip::make_address(host), static_cast<unsigned short>(port)));
std::cerr << "media-img IPC (TCP) " << host << ":" << port << "\n";
for (;;) {
asio::ip::tcp::socket sock(io);
acc.accept(sock);
handle_session(std::move(sock));
}
}
#if !defined(_WIN32)
#include <asio/local/stream_protocol.hpp>
#include <unistd.h>
int run_unix_server(const std::string& path) {
::unlink(path.c_str());
asio::io_context io;
asio::local::stream_protocol::acceptor acc(io, asio::local::stream_protocol::endpoint(path));
std::cerr << "media-img IPC (unix) " << path << "\n";
for (;;) {
asio::local::stream_protocol::socket sock(io);
acc.accept(sock);
// reuse same JSON line protocol over stream socket
asio::streambuf buf;
asio::read_until(sock, buf, '\n');
std::istream is(&buf);
std::string line;
std::getline(is, line);
nlohmann::json j = nlohmann::json::parse(line, nullptr, false);
if (!j.is_object()) {
asio::write(sock, asio::buffer(std::string(R"({"ok":false})") + "\n"));
continue;
}
media::ResizeOptions opt;
if (j.contains("max_width"))
opt.max_width = j["max_width"].get<int>();
if (j.contains("max_height"))
opt.max_height = j["max_height"].get<int>();
if (j.contains("format") && j["format"].is_string())
opt.format = j["format"].get<std::string>();
std::string err;
bool ok = media::resize_file(j["input"].get<std::string>(), j["output"].get<std::string>(), opt, err);
nlohmann::json out = ok ? nlohmann::json{{"ok", true}} : nlohmann::json{{"ok", false}, {"error", err}};
asio::write(sock, asio::buffer(out.dump() + "\n"));
}
}
#endif
} // namespace media::ipc

View File

@ -0,0 +1,15 @@
#pragma once
#include <string>
namespace media::ipc {
/**
 * TCP JSON-line server for resize jobs (cross-platform). Sequential accept
 * loop; does not return under normal operation.
 */
int run_tcp_server(const std::string& host, int port);
#if !defined(_WIN32)
/** Unix domain socket variant (same one-line JSON protocol). */
int run_unix_server(const std::string& path);
#endif
} // namespace media::ipc

View File

@ -1,271 +1,130 @@
#include <cstdlib>
#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include <laserpants/dotenv/dotenv.h>
#include <chrono>
#include <set>
#include <ctime>
#include <iomanip>
#include <sstream>
#include <rapidjson/document.h>
#include <CLI/CLI.hpp>
#include <toml++/toml.hpp>
#include "html/html.h"
#include "http/http.h"
#include "ipc/ipc.h"
#include "logger/logger.h"
#include "postgres/postgres.h"
#include "json/json.h"
#include "cmd_kbot.h"
#ifndef PROJECT_VERSION
#define PROJECT_VERSION "0.1.0"
#ifdef _WIN32
#include <process.h>
#else
#include <unistd.h>
#endif
int main(int argc, char *argv[]) {
/* Optional .env next to cwd — do not override variables already set in the shell. */
dotenv::init(dotenv::Preserve);
#include "core/resize.hpp"
#include "http/serve.hpp"
#include "ipc/ipc_serve.hpp"
CLI::App app{"kbot — KBot C++ CLI", "kbot"};
app.set_version_flag("-v,--version", PROJECT_VERSION);
#ifndef MEDIA_IMG_VERSION
#define MEDIA_IMG_VERSION "0.1.0"
#endif
std::string log_level = "info";
app.add_option("--log-level", log_level, "Set log level (debug/info/warn/error)")->default_val("info");
// Subcommand: parse HTML
std::string html_input;
auto *parse_cmd = app.add_subcommand("parse", "Parse HTML and list elements");
parse_cmd->add_option("html", html_input, "HTML string to parse")->required();
// Subcommand: select from HTML
std::string select_input;
std::string selector;
auto *select_cmd =
app.add_subcommand("select", "CSS-select elements from HTML");
select_cmd->add_option("html", select_input, "HTML string")->required();
select_cmd->add_option("selector", selector, "CSS selector")->required();
// Subcommand: config — read a TOML file
std::string config_path;
auto *config_cmd =
app.add_subcommand("config", "Read and display a TOML config file");
config_cmd->add_option("file", config_path, "Path to TOML file")->required();
// Subcommand: fetch — HTTP GET a URL
std::string fetch_url;
auto *fetch_cmd =
app.add_subcommand("fetch", "HTTP GET a URL and print the response");
fetch_cmd->add_option("url", fetch_url, "URL to fetch")->required();
// Subcommand: json — prettify JSON
std::string json_input;
auto *json_cmd = app.add_subcommand("json", "Prettify a JSON string");
json_cmd->add_option("input", json_input, "JSON string")->required();
// Subcommand: db — connect to Supabase and query
std::string db_config_path = "config/postgres.toml";
std::string db_table;
int db_limit = 10;
auto *db_cmd =
app.add_subcommand("db", "Connect to Supabase and query a table");
db_cmd->add_option("-c,--config", db_config_path, "TOML config path")
->default_val("config/postgres.toml");
db_cmd->add_option("table", db_table, "Table to query (optional)");
db_cmd->add_option("-l,--limit", db_limit, "Row limit")->default_val(10);
// Subcommand: worker — IPC mode (spawned by Node.js orchestrator)
std::string uds_path;
auto *worker_cmd = app.add_subcommand(
"worker", "Run as IPC worker (stdin/stdout length-prefixed JSON)");
worker_cmd->add_option("--uds", uds_path,
"Listen on TCP port (Windows) or Unix socket path");
// Subcommand: kbot — AI workflows & task configurations
auto* kbot_cmd = polymech::setup_cmd_kbot(app);
(void)kbot_cmd;
CLI11_PARSE(app, argc, argv);
// Worker mode uses stderr for logs to keep stdout clean for IPC frames
if (worker_cmd->parsed()) {
if (!uds_path.empty()) {
logger::init_uds("polymech-uds", log_level, "../logs/uds.json");
} else {
logger::init_stderr("polymech-worker", log_level);
static int forward_kbot(const std::vector<std::string> &args) {
const char *exe = std::getenv("KBOT_EXE");
if (exe == nullptr || exe[0] == '\0') {
std::cerr
<< "KBOT_EXE is not set. Set it to the kbot binary path (e.g. packages/kbot/cpp/dist/kbot.exe).\n";
return 1;
}
} else {
logger::init("polymech-cli", log_level);
}
// ── worker mode ─────────────────────────────────────────────────────────
if (worker_cmd->parsed()) {
logger::info("Worker mode: listening on stdin");
if (!uds_path.empty()) {
logger::info("Worker mode: UDS Server active on " + uds_path);
int rc = polymech::run_cmd_kbot_uds(uds_path);
return rc;
std::vector<std::string> storage;
storage.emplace_back(exe);
storage.insert(storage.end(), args.begin(), args.end());
std::vector<char *> argv;
for (auto &s : storage) {
argv.push_back(s.data());
}
// Send a "ready" message so the orchestrator knows we're alive
ipc::write_message({"0", "ready", "{}"});
while (true) {
ipc::Message req;
if (!ipc::read_message(req)) {
logger::info("Worker: stdin closed, exiting");
break;
}
logger::debug("Worker recv: type=" + req.type + " id=" + req.id);
if (req.type == "ping") {
ipc::write_message({req.id, "pong", "{}"});
} else if (req.type == "job") {
// Stub: echo the payload back as job_result
ipc::write_message({req.id, "job_result", req.payload});
} else if (req.type == "kbot-ai") {
logger::info("Worker: kbot-ai job received");
std::string req_id = req.id;
polymech::kbot::KBotCallbacks cb;
cb.onEvent = [&req_id](const std::string& type, const std::string& json) {
if (type == "job_result") {
ipc::write_message({req_id, "job_result", json});
} else {
ipc::write_message({"0", type, json});
}
};
int rc = polymech::run_kbot_ai_ipc(req.payload, req.id, cb);
if (rc != 0) {
ipc::write_message({req.id, "error", "{\"message\":\"kbot ai pipeline failed\"}"});
}
} else if (req.type == "kbot-run") {
logger::info("Worker: kbot-run job received");
std::string req_id = req.id;
polymech::kbot::KBotCallbacks cb;
cb.onEvent = [&req_id](const std::string& type, const std::string& json) {
if (type == "job_result") {
ipc::write_message({req_id, "job_result", json});
} else {
ipc::write_message({"0", type, json});
}
};
int rc = polymech::run_kbot_run_ipc(req.payload, req.id, cb);
if (rc != 0) {
ipc::write_message({req.id, "error", "{\"message\":\"kbot run pipeline failed\"}"});
}
} else if (req.type == "shutdown") {
ipc::write_message({req.id, "shutdown_ack", "{}"});
logger::info("Worker: shutdown requested, exiting");
break;
} else {
// Unknown type — respond with error
ipc::write_message(
{req.id, "error",
"{\"message\":\"unknown type: " + req.type + "\"}"});
}
argv.push_back(nullptr);
#ifdef _WIN32
const int rc = _spawnvp(_P_WAIT, exe, argv.data());
if (rc < 0) {
perror("_spawnvp");
return 1;
}
return 0;
}
// ── existing subcommands ────────────────────────────────────────────────
if (parse_cmd->parsed()) {
auto elements = html::parse(html_input);
logger::info("Parsed " + std::to_string(elements.size()) + " elements");
for (const auto &el : elements) {
std::cout << "<" << el.tag << "> " << el.text << "\n";
}
return 0;
}
if (select_cmd->parsed()) {
auto matches = html::select(select_input, selector);
logger::info("Matched " + std::to_string(matches.size()) + " elements");
for (const auto &m : matches) {
std::cout << m << "\n";
}
return 0;
}
if (config_cmd->parsed()) {
try {
auto tbl = toml::parse_file(config_path);
logger::info("Loaded config: " + config_path);
std::cout << tbl << "\n";
} catch (const toml::parse_error &err) {
logger::error("TOML parse error: " + std::string(err.what()));
return 1;
}
return 0;
}
if (fetch_cmd->parsed()) {
auto resp = http::get(fetch_url);
logger::info("HTTP " + std::to_string(resp.status_code) + " from " +
fetch_url);
if (json::is_valid(resp.body)) {
std::cout << json::prettify(resp.body) << "\n";
} else {
std::cout << resp.body << "\n";
}
return 0;
}
if (json_cmd->parsed()) {
if (!json::is_valid(json_input)) {
logger::error("Invalid JSON input");
return 1;
}
std::cout << json::prettify(json_input) << "\n";
return 0;
}
if (db_cmd->parsed()) {
try {
auto cfg = toml::parse_file(db_config_path);
postgres::Config pg_cfg;
pg_cfg.supabase_url = cfg["supabase"]["url"].value_or(std::string(""));
pg_cfg.supabase_key =
cfg["supabase"]["publishable_key"].value_or(std::string(""));
postgres::init(pg_cfg);
auto status = postgres::ping();
logger::info("Supabase: " + status);
if (!db_table.empty()) {
auto result = postgres::query(db_table, "*", "", db_limit);
if (json::is_valid(result)) {
std::cout << json::prettify(result) << "\n";
} else {
std::cout << result << "\n";
}
}
} catch (const std::exception &e) {
logger::error(std::string("db error: ") + e.what());
return 1;
}
return 0;
}
// ── kbot subcommand ──────────────────────────────────────────────────
if (polymech::is_kbot_ai_parsed()) {
return polymech::run_cmd_kbot_ai();
}
if (polymech::is_kbot_run_parsed()) {
return polymech::run_cmd_kbot_run();
}
// No subcommand — show help
std::cout << app.help() << "\n";
return 0;
return rc;
#else
execvp(exe, argv.data());
perror("execvp");
return 127;
#endif
}
int main(int argc, char **argv) {
  // Load .env (if present) without overriding variables already in the shell.
  dotenv::init(dotenv::Preserve);

  CLI::App app{"media-img — resize (CLI), serve (REST), ipc (JSON lines)", "media-img"};
  app.set_version_flag("-v,--version", std::string(MEDIA_IMG_VERSION));
  app.require_subcommand(1);

  // resize: one-shot file → file.
  std::string resize_in;
  std::string resize_out;
  int resize_max_w = 0;
  int resize_max_h = 0;
  std::string resize_fmt;
  auto *resize_cmd = app.add_subcommand("resize", "Resize an image file");
  resize_cmd->add_option("input", resize_in, "Input image path")->required();
  resize_cmd->add_option("output", resize_out, "Output image path")->required();
  resize_cmd->add_option("--max-width", resize_max_w, "Max width (0 = no limit)");
  resize_cmd->add_option("--max-height", resize_max_h, "Max height (0 = no limit)");
  resize_cmd->add_option("--format", resize_fmt, "Output format: png, jpg, jpeg (default: from extension)");

  // serve: blocking REST server.
  std::string http_host = "127.0.0.1";
  int http_port = 8080;
  auto *serve_cmd = app.add_subcommand("serve", "Run HTTP REST server");
  serve_cmd->add_option("--host", http_host, "Bind address")->default_val("127.0.0.1");
  serve_cmd->add_option("-p,--port", http_port, "TCP port")->default_val(8080);

  // ipc: JSON-line server over TCP, or a Unix socket off Windows.
  std::string ipc_host = "127.0.0.1";
  int ipc_port = 9333;
  std::string ipc_unix;
  auto *ipc_cmd = app.add_subcommand("ipc", "Run JSON-line IPC server (TCP; Unix socket on non-Windows)");
  ipc_cmd->add_option("--host", ipc_host, "Bind address")->default_val("127.0.0.1");
  ipc_cmd->add_option("-p,--port", ipc_port, "TCP port")->default_val(9333);
  ipc_cmd->add_option("--unix", ipc_unix, "Unix domain socket path (not Windows)");

  // kbot: pass-through to an external binary named by KBOT_EXE.
  auto *kbot_cmd = app.add_subcommand("kbot", "Forward remaining args to kbot (KBOT_EXE)");
  kbot_cmd->allow_extras(true);

  CLI11_PARSE(app, argc, argv);

  if (resize_cmd->parsed()) {
    media::ResizeOptions opt;
    opt.max_width = resize_max_w;
    opt.max_height = resize_max_h;
    opt.format = resize_fmt;
    std::string err;
    if (!media::resize_file(resize_in, resize_out, opt, err)) {
      std::cerr << err << "\n";
      return 1;
    }
    return 0;
  }
  if (serve_cmd->parsed()) {
    return media::http::run_server(http_host, http_port);
  }
  if (ipc_cmd->parsed()) {
#if defined(_WIN32)
    if (!ipc_unix.empty()) {
      std::cerr << "media-img: --unix is not supported on Windows; use --host and --port.\n";
      return 1;
    }
    return media::ipc::run_tcp_server(ipc_host, ipc_port);
#else
    if (!ipc_unix.empty()) {
      return media::ipc::run_unix_server(ipc_unix);
    }
    return media::ipc::run_tcp_server(ipc_host, ipc_port);
#endif
  }
  if (kbot_cmd->parsed()) {
    return forward_kbot(kbot_cmd->remaining());
  }
  std::cout << app.help() << "\n";
  return 0;
}

View File

@ -0,0 +1,30 @@
# Test image fixtures
## Generated PNGs (`build-fixtures.mjs`)
PNG files can be **regenerated** anytime (no extra npm dependencies):
```bash
# from packages/media/cpp
npm run generate:assets
# or
node tests/assets/build-fixtures.mjs
```
| File | Size | Role |
|------|------|------|
| `tiny-1x1.png` | 1×1 | Minimal decode/encode |
| `tiny-8x8.png` | 8×8 | Small resize |
| `square-64.png` | 64×64 | Gradient-ish pattern (used by `npm run test:media`) |
| `wide-320x80.png` | 320×80 | Wide aspect |
| `tall-80x320.png` | 80×320 | Tall aspect |
| `mid-256x256.png` | 256×256 | Busier pattern |
| `photo-ish-640x360.png` | 640×360 | Larger image |
| `stripes-512x64.png` | 512×64 | Horizontal bands |
| `checker-128x128.png` | 128×128 | Checkerboard (used by IPC Unix test on Linux/macOS) |
All are **RGB 8-bit** PNG (no alpha). **`media-img` uses STB**, not libvips.
## Other folders
Subdirectories such as `in/`, `out/`, `in_webp/`, `out_webp/`, `out_jpg/`, `watermark-*`, and `bg-removed/` hold extra samples for manual or future automated tests. They are independent of `build-fixtures.mjs`.

Binary file not shown.

After

Width:  |  Height:  |  Size: 633 KiB

View File

@ -0,0 +1,123 @@
/**
* Generate PNG RGB fixtures for media-img tests (no extra npm deps).
* Run: node tests/assets/build-fixtures.mjs
*/
import { writeFileSync, mkdirSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { deflateSync } from 'node:zlib';
const __dirname = dirname(fileURLToPath(import.meta.url));
/** CRC-32 (IEEE, reflected polynomial 0xEDB88320) over a byte buffer. */
function crc32(buf) {
  let crc = 0xffffffff;
  for (const byte of buf) {
    crc ^= byte;
    for (let bit = 0; bit < 8; bit++) {
      const lsb = crc & 1;
      crc >>>= 1;
      if (lsb) crc ^= 0xedb88320;
    }
  }
  return (crc ^ 0xffffffff) >>> 0;
}
/** Wrap `data` in a PNG chunk: 4-byte length, 4-char type, payload, CRC over type+payload. */
function chunk(type, data) {
  const typeBuf = Buffer.from(type, 'ascii');
  const lenBuf = Buffer.alloc(4);
  lenBuf.writeUInt32BE(data.length, 0);
  const crcBuf = Buffer.alloc(4);
  crcBuf.writeUInt32BE(crc32(Buffer.concat([typeBuf, data])) >>> 0, 0);
  return Buffer.concat([lenBuf, typeBuf, data, crcBuf]);
}
/** 13-byte IHDR payload: 8-bit/channel truecolor RGB, no interlace. */
function ihdr(w, h) {
  const out = Buffer.alloc(13);
  out.writeUInt32BE(w, 0);
  out.writeUInt32BE(h, 4);
  out[8] = 8; // bit depth
  out[9] = 2; // color type 2 = truecolor RGB
  out[10] = 0; // compression method
  out[11] = 0; // filter method
  out[12] = 0; // interlace method
  return out;
}
/** Raw RGB scanlines: each row is a filter byte (0 = None) plus width*3 bytes. */
function rawRgb(w, h, pixel) {
  const stride = 1 + w * 3;
  const out = Buffer.alloc(stride * h);
  // `pixel` is either a constant [r,g,b] or a per-coordinate (x, y) => [r,g,b].
  const sample = typeof pixel === 'function' ? pixel : () => [pixel[0], pixel[1], pixel[2]];
  for (let y = 0; y < h; y++) {
    const rowStart = y * stride;
    out[rowStart] = 0; // filter type: None
    for (let x = 0; x < w; x++) {
      const [r, g, b] = sample(x, y);
      const at = rowStart + 1 + x * 3;
      out[at] = r;
      out[at + 1] = g;
      out[at + 2] = b;
    }
  }
  return out;
}
/** Assemble a complete PNG byte stream: signature + IHDR + deflated IDAT + IEND. */
function encodePng(w, h, pixel) {
  const signature = Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]);
  const idat = deflateSync(rawRgb(w, h, pixel));
  return Buffer.concat([
    signature,
    chunk('IHDR', ihdr(w, h)),
    chunk('IDAT', idat),
    chunk('IEND', Buffer.alloc(0)),
  ]);
}
/** Encode one fixture and write it next to this script, logging name and size. */
function writeFixture(name, w, h, pixel) {
  const target = join(__dirname, name);
  writeFileSync(target, encodePng(w, h, pixel));
  console.log(`wrote ${name} (${w}x${h})`);
}
mkdirSync(__dirname, { recursive: true });

// Each entry: [name, width, height, pixel] where pixel is a constant [r,g,b]
// or an (x, y) => [r,g,b] generator. Written in declaration order.
const fixtures = [
  ['tiny-1x1.png', 1, 1, [255, 0, 0]],
  ['tiny-8x8.png', 8, 8, [40, 80, 160]],
  ['square-64.png', 64, 64, (x, y) => {
    const v = ((x + y) * 4) & 255;
    return [v, 128, 255 - v];
  }],
  ['wide-320x80.png', 320, 80, (x) => {
    const r = (x * 255) / 319;
    return [r & 255, 90, 200];
  }],
  ['tall-80x320.png', 80, 320, (x, y) => {
    const g = (y * 255) / 319;
    return [30, g & 255, 100];
  }],
  ['mid-256x256.png', 256, 256, (x, y) => {
    const cx = x - 128;
    const cy = y - 128;
    const d = Math.sqrt(cx * cx + cy * cy);
    const v = Math.min(255, Math.floor(d));
    return [v, 255 - v, (x + y) & 255];
  }],
  ['photo-ish-640x360.png', 640, 360, (x, y) => [
    (x * 31) & 255,
    (y * 17 + x) & 255,
    ((x + y) * 13) & 255,
  ]],
  ['stripes-512x64.png', 512, 64, (x) => {
    const band = Math.floor(x / 32) % 3;
    return band === 0 ? [220, 40, 40] : band === 1 ? [40, 200, 60] : [40, 80, 220];
  }],
  ['checker-128x128.png', 128, 128, (x, y) => {
    const c = (Math.floor(x / 16) + Math.floor(y / 16)) % 2 === 0 ? 240 : 20;
    return [c, c, c];
  }],
];

for (const [name, w, h, pixel] of fixtures) {
  writeFixture(name, w, h, pixel);
}
console.log('done.');

Binary file not shown.

After

Width:  |  Height:  |  Size: 455 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 266 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 292 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 272 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 259 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 275 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 285 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 361 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 398 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 136 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 304 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 135 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 948 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 259 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 284 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 274 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 609 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 292 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 389 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 336 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 565 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 771 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 480 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 140 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 221 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 169 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 662 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 380 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 224 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 171 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 232 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 182 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 243 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 179 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 292 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 223 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 310 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 237 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 82 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 75 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 697 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 516 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 252 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 275 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 307 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 585 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 350 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 488 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 568 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 286 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 567 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 298 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 839 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 372 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 98 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 710 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 609 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 292 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 389 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 336 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 565 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 771 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 480 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 275 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 551 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 629 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1005 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 69 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 74 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 649 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 243 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 252 KiB

Some files were not shown because too many files have changed in this diff Show More