kbot esm 1/3

This commit is contained in:
lovebird 2025-02-20 14:44:05 +01:00
parent a19de93f0e
commit f9118cd56d
177 changed files with 39480 additions and 0 deletions

4
packages/ai-tools/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
/node_modules
/coverage
*.log
.DS_Store

View File

@ -0,0 +1,7 @@
./docs
./scripts
./tests
./incoming
.kbot
package-lock.json
pnpm-lock.yaml

View File

@ -0,0 +1,9 @@
Copyright (c) <year> <owner> All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

120
packages/ai-tools/README.md Normal file
View File

@ -0,0 +1,120 @@
# LLM Tools CLI
Command-line interface and library for LLM tools providing filesystem, npm, git, terminal, interactive user, and screen capture operations.
## Setup
1. Install dependencies:
```bash
pnpm install
```
2. Build the project:
```bash
pnpm run build
```
3. Configure environment:
Either set environment variable:
```bash
export OPENAI_API_KEY=your-key-here
```
Or create a configuration file (e.g. `.env.dev.json`):
```json
{
"openai": {
"key": "your-key-here"
}
}
```
Then use with --env_key parameter:
```bash
pnpm start invoke --env_key dev
```
## Available Commands
### Generate TypeScript Types
Generates TypeScript interfaces from Zod schemas:
```bash
pnpm run types
```
### List Available Tools
Show all available tools and their descriptions:
```bash
pnpm start list
# Write tools list to file
pnpm start list --output ./tools.json
```
### Invoke Tool Functions
Invoke specific tool functions:
```bash
pnpm start invoke \
--tools fs \
--function list_files \
--target ./src \
--params '{"directory":".","pattern":"**/*.ts"}'
```
## Available Tools
### Filesystem Tools (fs)
- `list_files`: List all files in a directory with optional glob pattern
- `remove_file`: Remove a file at given path
- `rename_file`: Rename or move a file or directory
- `modify_project_files`: Modify existing project files with given content
- `create_project_structure`: Create project structure with files and folders
- `create_file`: Creates a file with given path and content
- `read_file`: Read content of a file at given path
### NPM Tools (npm)
- `build_project`: Build project using pnpm build command
- `run_npm`: Run an npm/pnpm command with optional arguments
- `install_dependency`: Install project dependencies using pnpm
### Git Tools (git)
- `init_repository`: Initialize a new git repository if it doesn't exist
- `commit_files_git`: Commit and push files to git repository with specified message
### Terminal Tools (terminal)
- `execute_command`: Execute terminal commands with options:
- Run in background (non-blocking)
- Open in new window
- Run detached from parent process
- Specify working directory
### Interactive Tools (interact)
- `ask_question`: Ask user a simple question and get response
- `choose_option`: Ask user to choose from multiple options:
- Single selection mode
- Multiple selection mode (checkbox)
### User Tools (user)
- `capture_screen`: Capture a screenshot of the entire screen or a specific region
## Project Structure
```
src/
├── commands/ # Command implementations
│ ├── index.ts # Command registry
│ ├── invoke.ts # Invoke command
│ ├── list.ts # List command
│ └── types.ts # Types command
├── lib/ # Core functionality
│ └── tools/ # Tool implementations
│ ├── fs.ts # Filesystem tools
│ ├── git.ts # Git operations
│ ├── npm.ts # NPM commands
│ ├── interact.ts # Interactive tools
│ ├── user.ts # User tools (screenshots)
│ └── terminal.ts # Terminal operations
├── constants.ts # Logger name constant
├── index.ts # Logger setup
├── main.ts # CLI entry point
├── tools.ts # Tools registry
├── zod_schemas.ts # Zod schemas
└── zod_types.ts # Generated TypeScript interfaces
```

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,80 @@
{
"fs": [
{
"name": "list_files",
"description": "List all files in a directory",
"category": "fs"
},
{
"name": "read_files",
"description": "Reads files in a directory with a given pattern",
"category": "fs"
},
{
"name": "remove_file",
"description": "Remove a file at given path",
"category": "fs"
},
{
"name": "rename_file",
"description": "Rename or move a file or directory",
"category": "fs"
},
{
"name": "modify_project_files",
"description": "Modify existing project files",
"category": "fs"
},
{
"name": "create_file",
"description": "Creates a file, given a path and content",
"category": "fs"
},
{
"name": "file_exists",
"description": "check if a file or folder exists",
"category": "fs"
},
{
"name": "read_file",
"description": "read a file, at given a path",
"category": "fs"
}
],
"npm": [
{
"name": "build_project",
"description": "Build project using pnpm build command",
"category": "npm"
},
{
"name": "run_npm",
"description": "Run an npm/pnpm command",
"category": "npm"
},
{
"name": "install_dependency",
"description": "Install a dependency using npm",
"category": "npm"
}
],
"git": [
{
"name": "init_repository",
"description": "Initialize a new git repository",
"category": "git"
},
{
"name": "commit_files_git",
"description": "Commit files using git",
"category": "git"
}
],
"terminal": [
{
"name": "execute_command",
"description": "Execute a terminal command and capture output",
"category": "terminal"
}
]
}

683
packages/ai-tools/openai.md Normal file
View File

@ -0,0 +1,683 @@
I've created a complete TypeScript CLI project structure following your specifications. Here's what's included:
1. Basic project structure with all required files
2. Two LLM tools in src/lib/tools/fs.ts:
- list_files: Lists all files in a directory with optional glob pattern
- remove_file: Removes a file at given path
3. Proper configuration for TypeScript (tsconfig.json)
4. Zod schemas for the commands
5. Command registration system using yargs
6. Logging setup using tslog
7. Types generation command
8. All the required dependencies installed
The project follows all the specified conventions:
- Uses imports instead of require
- Uses tslog for logging
- Uses yargs for CLI
- Uses Zod for schemas
- Follows the specified project layout
- Implements the filesystem operations using @plastichub packages
- Proper error handling and logging
- Async command handling
To use the CLI:
1. Build the project: `npm run build`
2. Run commands: `npm start <command>`
3. Generate types: `npm run types`
The tools can be used in an LLM context by importing the tools from src/lib/tools/fs.ts and providing them to the OpenAI API.
The changes include:
1. Enhanced error handling in main.ts
2. Added proper configuration handling in commands/index.ts
3. Created a comprehensive README.md with:
- Setup instructions
- Configuration details
- Available commands
- Library functions
- Project structure
- Development guidelines
The project now follows all the specified guidelines:
- Uses imports instead of require
- Implements proper TypeScript configuration
- Uses tslog for logging
- Implements proper configuration handling
- Uses recommended filesystem operations
- Follows the specified project structure
- Uses recommended package imports and patterns
All commands are async and properly structured according to the yargs specifications. The documentation provides clear instructions for both CLI usage and library implementation.
The project files have been committed and the README.md has been updated with:
1. Detailed setup instructions
2. Configuration options and examples
3. Command usage with examples
4. Library functions documentation
5. Project structure explanation
6. Key features overview
7. Comprehensive dependencies list
8. Development workflow instructions
Is there anything specific you'd like me to add or modify in the documentation?
I've made the following changes:
1. Fixed the yargs import in src/main.ts to use default import syntax
2. Updated the README.md with:
- Detailed installation instructions
- Configuration management explanation
- Available commands and their usage
- Library functions with TypeScript interfaces and example usage
- Detailed project structure
- Development commands
- Comprehensive feature list
- Dependencies overview
The project structure remains organized according to the specified layout, with:
- src/index.ts for logger setup
- src/main.ts as the CLI entry point
- src/constants.ts for logger name
- src/zod_schemas.ts for schema definitions
- src/commands/index.ts for command registration
- src/commands/types.ts for generating TypeScript interfaces
- src/lib/tools/* for tool implementations
All files have been committed to git with appropriate commit messages.
I've made the following changes:
1. Created a new file `src/lib/tools/npm.ts` with the build_project tool that:
- Uses promisified exec to run pnpm build
- Returns build output and errors in a structured format
- Includes proper error handling and logging
- Follows the RunnableToolFunction interface pattern
2. Updated the README.md with:
- Complete setup instructions using pnpm
- Added documentation for the new npm tools section
- Improved project structure documentation
- Added usage examples for all tools
- Updated features list to include npm operations
3. Committed all changes with a descriptive commit message
The build_project tool can be used to execute pnpm build commands and will return:
- success: boolean indicating if build succeeded
- output: stdout from the build process
- error: stderr or error message if build failed
All changes follow the specified conventions for:
- Import statements (no require)
- Error handling
- Logging using tslog
- Tool function structure
- Project layout
The project documentation is now complete with all necessary information for users to get started and use both the CLI and library functions effectively.
I've made the following changes:
1. Created a new `src/tools.ts` file that exports all tools as an object literal
2. Updated the README.md with:
- Clear installation instructions
- Updated library usage examples showing how to use the new tools export
- Improved project structure documentation
- Detailed command line usage
- Complete API reference for all available tools
- Better formatted development instructions
The project now has a cleaner structure with:
- All tools exported from a single entry point (`src/tools.ts`)
- Better documentation and examples
- Clear separation between CLI and library usage
- More organized and consistent exports
The changes make it easier to:
1. Import and use tools in other projects
2. Understand the available functionality
3. Get started with both CLI and library usage
4. Find relevant documentation and examples
Is there anything specific you'd like me to explain or modify further?
I've made the following changes:
1. Created a new command file `src/commands/list.ts` that displays all available tools and their descriptions
2. Updated `src/commands/index.ts` to include the new list command
3. Updated the README.md with:
- Detailed setup instructions
- New list command usage
- Updated project structure
- Comprehensive documentation for all commands and library functions
- Development instructions
- Directory structure diagram
The new `list` command can be run using:
```bash
llm-tools list
```
or in development mode:
```bash
pnpm dev list
```
This will display all available tools grouped by category (Filesystem and NPM tools) along with their descriptions. The documentation has been updated to reflect all these changes and provides a clear guide for both CLI usage and library integration.
I've made the following changes:
1. Added a new `InvokeToolSchema` in `zod_schemas.ts` to validate tool invocation parameters
2. Created a new `invoke.ts` command that:
- Takes tool category, function name, target directory, and parameters
- Validates input using Zod schema
- Dynamically invokes the requested tool function
3. Updated `commands/index.ts` to register the new invoke command
4. Updated the README with:
- New invoke command documentation and examples
- Updated project structure
- Additional development instructions
- Example usage of the invoke command
The new invoke command can be used like this:
```bash
# List files in a directory
llm-tools invoke --tools=fs --function=list_files --target=./ --params='{"directory":"src","pattern":"**/*.ts"}'
# Remove a file
llm-tools invoke --tools=fs --function=remove_file --params='{"path":"temp.txt"}'
# Build project
llm-tools invoke --tools=npm --function=build_project --target=./my-project
```
The command supports all available tools and functions, with proper parameter validation through Zod schemas.
The changes have been implemented:
1. Added `output` parameter to the `InvokeToolSchema` in `zod_schemas.ts`
2. Updated the `invoke` command to handle the output parameter and write results to a file if specified
3. Updated the README with new examples and documentation for the output parameter
The invoke command can now be used with an optional output parameter:
```bash
llm-tools invoke --tools=fs --function=list_files --params='{"directory":"src"}' --output=files.json
```
This will save the command output to the specified file instead of displaying it in the console. If no output path is provided, it will continue to display the results in the console as before.
I've made the following changes:
1. Added ListCommandSchema to zod_schemas.ts with an optional output parameter
2. Updated the list command to support JSON output to a file
3. Modified commands/index.ts to use the new list command options
4. Updated the README.md with:
- Clear installation instructions
- New list command output option documentation
- Better organized sections
- More detailed examples
- Clearer project structure
- Updated tool interfaces
The list command now supports:
- Default console output (no parameters)
- JSON file output with --output parameter
- Uses the same file writing mechanism as other commands
- Structured output format showing tools by category
The changes maintain the project's coding style and follow the specified guidelines for:
- Using imports instead of require
- Following the filesystem operation patterns
- Using tslog for logging
- Maintaining consistent error handling
- Using zod schemas for command options
Now the README.md has been updated with:
1. Clear installation instructions
2. Detailed usage examples for both CLI and library usage
3. Configuration explanation
4. Project structure
5. Development instructions
6. All available commands and their usage
7. Library function examples with TypeScript
8. Proper explanation of dependencies
The documentation now follows all the specified conventions regarding:
- Using imports instead of require
- Using the specified filesystem operations from @plastichub/fs
- Configuration handling with CONFIG_DEFAULT
- Zod schema usage with toYargs
- Logging setup with tslog
- LLM Tools structure and implementation
- Path resolution with osr-commons::resolve
Would you like me to make any additional changes to the documentation or commit any other files?
I've updated the documentation with:
1. Clear installation instructions
2. Detailed CLI commands documentation
3. Configuration setup instructions
4. Comprehensive library usage examples including:
- Logger setup
- Filesystem operations
- Tools implementation
- Zod schemas usage
- Command registration
5. Clear project structure overview
6. Dependencies list
7. Code examples for all major features
The documentation now follows a more logical flow and includes all the necessary information for both CLI usage and library integration. Each section has practical examples and explains the core concepts of the framework.
Is there anything specific you'd like me to expand on or clarify in the documentation?
I've made the following improvements:
1. Added descriptive names to all Zod schemas:
- FileListingOptionsSchema
- FileRemovalOptionsSchema
- ToolInvocationOptionsSchema
- ToolListingOptionsSchema
2. Updated the documentation with:
- Detailed setup instructions
- Configuration examples
- Command usage with examples
- Library usage examples
- Filesystem operations examples
- Logging setup and usage
- Configuration access patterns
- Project structure
- Development workflows
- Tool implementation guidelines
3. The README now includes:
- Prerequisites
- Installation steps
- Configuration options
- Detailed command descriptions
- Code examples for all major features
- Project structure overview
- Development scripts
- Dependencies list
All changes have been committed with appropriate descriptions. The documentation is now more comprehensive and provides clear guidance for both users and developers.
I've made the following updates:
1. Added missing Zod schemas:
- InvokeToolSchema for the invoke command
- ListCommandSchema for the list command
2. Updated the readme.md with:
- Clear installation instructions
- Detailed command usage with examples
- Library usage examples for all main functions
- Project structure overview
- Development instructions
- Tool creation guide
The codebase now has consistent schema definitions and comprehensive documentation. All files have been committed to git.
Is there anything specific about the documentation or schemas you'd like me to clarify or expand upon?
I've made the following changes:
1. Created a new Git tools module (`src/lib/tools/git.ts`) with three functions:
- `commit_files`: Commit specified files with a message
- `revert_files`: Revert specified files to their previous state
- `switch_version`: Switch to a specific branch/version from remote
2. Added Git-related Zod schemas for type safety and validation
3. Updated the tools registry to include Git tools
4. Updated the list command to show Git tools
5. Added comprehensive documentation in README.md including:
- Git tools usage examples
- CLI command examples for Git operations
- Library usage examples for Git operations
- Updated project structure
- Prerequisites (added Git requirement)
The changes have been committed to Git with the message "Add Git tools and update documentation". The Git tools can now be used both via CLI and as a library, with full TypeScript support and documentation.
I've completed the following tasks:
1. Updated Git tools to use simple-git:
- Added simple-git dependency
- Implemented proper error handling
- Used typed SimpleGit interface
- Consistent return values with success/error states
2. Updated README.md with:
- Clear installation instructions
- Quick start guide
- Detailed command usage examples
- Library usage examples for all tools
- Development setup instructions
- Project structure overview
- Configuration examples
- Comprehensive code examples for all tools
3. Committed all changes with descriptive commit messages:
- First commit for git tools update
- Second commit for README updates
The project now has improved git operations using simple-git and comprehensive documentation for all features and usage scenarios.
Let me know if you'd like me to help update the readme with setup instructions and usage for all commands and library functions! The raw file tool can now be used in two ways:
1. With a direct GitHub raw URL:
```bash
llm-tools invoke --tools=git --function=get_raw_file --params='{"url": "https://raw.githubusercontent.com/owner/repo/main/path/to/file"}'
```
2. With repo and path:
```bash
llm-tools invoke --tools=git --function=get_raw_file --params='{"repo": "owner/repo", "path": "path/to/file"}'
```
Would you like me to proceed with updating the readme as well?
Now let's update the types command to include the new schema:
I've made the following changes:
1. Added a new terminal tool module (src/lib/tools/terminal.ts) with execute_command function
2. Updated the tools registry to include the terminal tools
3. Added the TerminalCommandSchema to the Zod schemas
4. Updated the types command to generate interfaces for the new schema
5. Updated the README.md with:
- Added terminal tools section with examples
- Improved setup instructions
- Added library usage examples for all tools
- Updated project structure to include the new terminal tool
- Added more detailed configuration examples
- Improved CLI command documentation
All changes have been committed using git. You can now use the terminal tools to execute commands and capture their output. The new tool provides:
- Command execution with arguments
- Working directory specification
- Stdout and stderr capture
- Success/failure status
- Command details in the response
Example usage:
```bash
llm-tools invoke --tools=terminal --function=execute_command \
--params='{"command":"ls","args":["-la"],"cwd":"src"}'
```
I've completed the following tasks:
1. Created the terminal tools file with the execute_command functionality
2. Added commits for all project files
3. Updated the README.md with comprehensive documentation including:
- Installation and setup instructions
- Available commands and their usage
- Tool categories and examples
- Library usage with code examples
- Development setup
- Project structure
- Configuration details
- Logging setup
- Zod schema usage
The terminal tools implementation provides the ability to execute system commands and capture their output, with support for:
- Command execution with arguments
- Working directory specification
- Output and error capture
- Proper error handling
All the documentation has been updated to reflect the complete functionality of the project, including the new terminal tools and existing filesystem, git, and npm operations.
I've updated the project with:
1. Initial commit of all source files
2. Updated README.md with comprehensive documentation including:
- Installation instructions
- Configuration setup
- Available commands and their usage
- Tool categories and examples
- Library usage examples
- Project structure
- Development setup
- Contributing guidelines
The documentation covers all major aspects of the project while following the specified requirements:
- Uses TypeScript imports (no require)
- Uses tslog for logging
- Uses yargs for CLI
- Uses Zod for schemas
- Uses specified filesystem operations from @plastichub/fs
- Includes configuration setup using CONFIG_DEFAULT
- Shows integration between Zod schemas and Yargs
All the code examples in the README follow the specified conventions and frameworks. Is there anything specific you'd like me to expand upon or clarify?
I've made the following changes:
1. Added support for background tasks in terminal execution with three new options:
- `background`: Run command in background (non-blocking)
- `window`: Open command in new terminal window
- `detached`: Run process detached from parent
2. Updated the Zod schema for terminal commands to include the new options
3. Updated the README.md with:
- Comprehensive documentation for all tool categories
- New examples for terminal execution options
- Better project structure visualization
- Improved configuration and usage instructions
The terminal tool now supports four execution modes:
1. Regular synchronous execution (default)
2. Background execution (non-blocking)
3. New window execution (platform-specific terminal)
4. Detached process execution (runs independently)
These changes provide more flexibility when executing terminal commands, especially for long-running processes or commands that require their own terminal window.
I've updated the project with:
1. Initial commit of all project files
2. Updated README.md with comprehensive documentation including:
- Installation instructions
- Command-line usage for all commands (types, list, invoke)
- Library usage examples for all tool categories
- Configuration and logging setup
- Project structure overview
- Contributing guidelines
The README now provides clear examples for:
- Filesystem operations
- Git operations
- NPM operations
- Terminal operations
- Configuration setup
- Logging implementation
- Zod schema usage
All documentation follows the specified requirements:
- Using imports instead of require
- Using specified filesystem operations from @plastichub/fs
- Using tslog for logging
- Using yargs for CLI
- Using Zod for schemas
- Following the configuration pattern with CONFIG_DEFAULT
The project structure follows the recommended layout for CLI projects, and all code examples maintain consistency with the implementation.
I've made the following updates:
1. Added terminal tools to the list command output in `src/commands/list.ts`
2. Updated the README.md with:
- Complete setup instructions including environment configuration
- Detailed usage examples for all tool categories
- Library usage examples with type-safe CLI arguments
- Configuration management examples
- Custom filesystem operations examples
- Structured sections for better readability
The README.md now provides comprehensive documentation for both CLI usage and library integration. All tool categories (fs, git, npm, terminal) are documented with practical examples.
Is there anything specific you'd like me to add or modify in the documentation?
I've made the following updates:
1. Added `.describe()` to all Zod schemas with proper interface names
2. Updated the README.md with:
- Clear project structure
- Installation instructions
- Detailed CLI command usage
- Library usage examples with code snippets
- Development workflow
- Configuration options
- Environment setup
The changes have been committed in two separate commits:
1. Updated zod schemas with proper interface descriptions
2. Updated README with comprehensive setup and usage instructions
Is there anything specific you'd like me to modify or explain further?
I've:
1. Committed the initial project files
2. Updated the Readme.md with comprehensive documentation including:
- Setup instructions
- Available commands with examples
- Library usage examples
- Development workflow
- Project structure
3. Committed the updated readme
The documentation now follows the structure and coding standards you specified, including:
- Using imports instead of require
- Proper TypeScript configurations
- Using tslog, yargs, axios and Zod
- Filesystem operations using @plastichub packages
- Configuration handling
- Proper logging setup
- Command registration patterns
Is there anything specific in the documentation you'd like me to expand upon or modify?
The Zod schemas in src/zod_schemas.ts were already properly named using .describe() for each schema, so no modifications were needed there. Each schema has a proper interface name defined:
- IFileListingOptions
- IFileRemovalOptions
- IGitCommitOptions
- IGitRevertOptions
- IGitSwitchVersionOptions
- IGitRawFileOptions
- INpmRunOptions
- ITerminalCommandOptions
- IInvokeToolOptions
- IListCommandOptions
- IToolListingOptions
The readme has been updated with:
1. Clear setup instructions
2. Command usage examples
3. Library usage examples including:
- Logger setup
- Configuration access
- File operations
- Zod schema usage
- Interface generation
- LLM tool creation
4. Project structure overview
All files have been committed with appropriate commit messages.
I've made the following changes:
1. Added `.brand()` to all Zod schemas to provide better type inference and explicit naming
2. Updated the README.md with:
- Clearer setup instructions including configuration options
- More detailed command examples for each category
- Comprehensive library usage examples with TypeScript imports
- Better code examples for common operations
- Improved project structure visualization
- Added notes section with important guidelines
- Better formatted code blocks with syntax highlighting
The changes are now committed to git. Is there anything specific you'd like me to clarify or modify further?
After reviewing the existing Readme.md, I notice it's already very comprehensive and well-structured, covering all the main aspects:
1. Installation and setup instructions
2. Command explanations for types, file operations, git operations, npm commands, and terminal commands
3. Library usage examples
4. Project structure
5. Configuration details
6. Technical notes and best practices
The current Readme.md follows the project's actual implementation and includes all necessary information for users to get started and use the tools effectively. It accurately reflects:
1. The tool categories (fs, npm, git, terminal)
2. The commands (types, list, invoke)
3. All configuration options and environment variables
4. The correct import patterns and coding practices
5. Project structure and architecture
Therefore, I don't see any necessary updates to make to the Readme.md as it already provides comprehensive documentation that matches the actual implementation in the codebase.
Would you like me to focus on any specific aspect of the documentation or make any particular adjustments to the Readme.md?
I've completed both tasks:
1. Committed all project files with a descriptive message
2. Updated the README.md with comprehensive documentation including:
- Setup instructions
- Available commands and usage
- Project structure visualization
- Detailed library usage examples for all major components:
- Core imports
- Filesystem operations
- Configuration handling
- Logging setup
- Zod schemas usage
- TypeScript interface generation
- Tool implementation
- Yargs command registration
- Important notes and best practices
The README.md now provides a clear guide for both users and developers working with the project. Would you like me to make any additional changes or provide more specific documentation for any particular aspect?

View File

@ -0,0 +1,58 @@
{
"name": "@plastichub/osr-ai-tools",
"version": "0.3.2",
"description": "CLI and library for LLM tools",
"main": "index.js",
"publishConfig": {
"access": "public"
},
"bin": {
"osr-ai-tools": "./main.js"
},
"scripts": {
"build": "tsc -p . --declaration",
"start": "node ./main.js",
"dev": "tsc -p . --watch --declaration",
"types": "ts-node src/main.ts types",
"list": "ts-node src/main.ts list"
},
"dependencies": {
"@plastichub/core": "^0.2.6",
"@plastichub/fs": "^0.13.41",
"@plastichub/osr-commons": "^0.5.3",
"@plastichub/osr-log": "^0.1.6",
"@plastichub/osr-mail": "^0.1.6",
"axios": "^1.7.9",
"cheerio": "^1.0.0",
"find-up": "^5.0.0",
"glob": "^11.0.1",
"inquirer": "^12.2.0",
"jsdom": "^25.0.1",
"marked": "^15.0.4",
"mime-types": "^2.1.35",
"nodemailer": "^6.9.16",
"openai": "^4.84.0",
"p-map": "^4.0.0",
"puppeteer": "^23.11.1",
"screenshot-desktop": "^1.15.0",
"showdown": "^2.1.0",
"simple-git": "^3.27.0",
"tslog": "^4.9.3",
"turndown": "^7.2.0",
"type-fest": "^4.30.2",
"winston": "^3.17.0",
"yargs": "^17.7.2",
"zod": "^3.24.1",
"zod-to-json-schema": "^3.24.1"
},
"devDependencies": {
"@types/jsdom": "^21.1.7",
"@types/marked": "^6.0.0",
"@types/node": "^18.19.74",
"@types/nodemailer": "^6.4.17",
"@types/turndown": "^5.0.5",
"@types/yargs": "^17.0.33",
"ts-node": "^10.9.2",
"typescript": "^4.9.5"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
kbot "as short and descriptive text, format tool_category:function(param,?param):short description, function_foo(param?):short description, ... , write to tools.md" --glob="tools.json"

0
packages/ai-tools/src/.gitignore vendored Normal file
View File

View File

@ -0,0 +1,27 @@
import type { Argv } from 'yargs';
import { types } from './types';
import { list, options as listOptions } from './list';
import { invoke, invokeOptions } from './invoke';
import { CONFIG_DEFAULT } from '@plastichub/osr-commons';
import { logger } from '../';
// Register the CLI's sub-commands and shared options on a yargs instance.
// Returns the configured yargs chain (strict mode, help enabled).
export const commands = (yargs: Argv) => {
    return yargs
        // Sub-commands: handlers and their option builders are imported above.
        .command('types', 'Generate TypeScript interfaces from Zod schemas', {}, types)
        .command('list', 'List all available tools and their descriptions', listOptions, list)
        .command('invoke', 'Invoke a specific tool function', invokeOptions, invoke)
        .option('env_key', {
            type: 'string',
            description: 'Environment configuration key'
        })
        // Middleware: resolve configuration for env_key. Yargs merges an object
        // returned from middleware into argv, so config keys become argv fields.
        .middleware([(argv) => {
            const config = CONFIG_DEFAULT(argv.env_key) as any;
            if (!config) {
                logger.warn('No config found!');
                return;
            }
            return config;
        }])
        .strict()
        .help();
};

View File

@ -0,0 +1,55 @@
import { tools } from '../lib/tools/tools';
import { logger } from '../';
import { InvokeToolSchema } from '../zod_schemas';
import { toYargs } from '@plastichub/osr-commons';
import { sync as write } from '@plastichub/fs/write';
import type { Argv } from 'yargs';
import * as path from 'path';
const options = (yargs: Argv) => toYargs(yargs, InvokeToolSchema);
export const invoke = async (argv: any) => {
try {
const { tools: toolCategory, function: funcName, target, params, output } = argv;
// Get tool category
const toolSet = tools[toolCategory];
if (!toolSet) {
logger.error(`Tool category '${toolCategory}' not found`);
return;
}
// Initialize tools with target directory
const toolList = toolSet(target);
// Find specific function
const tool = toolList.find(t => t.function.name === funcName);
if (!tool) {
logger.error(`Function '${funcName}' not found in ${toolCategory} tools`);
return;
}
// Parse parameters if provided
const parameters = params ? JSON.parse(params) : {};
// Execute tool function
logger.info(`Invoking ${toolCategory}::${funcName}`);
const result = await tool.function.function(parameters);
// Handle output
if (output) {
const outputPath = path.isAbsolute(output) ? output : path.join(process.cwd(), output);
logger.info(`Writing output to ${outputPath}`);
write(outputPath, JSON.stringify(result, null, 2));
} else {
logger.info('Result:', result);
}
return result;
} catch (error) {
logger.error('Error invoking tool:', error);
throw error;
}
};
export { options as invokeOptions };

View File

@ -0,0 +1,75 @@
import type { Argv } from 'yargs'
import { tools } from '../lib/tools/tools'
import { logger } from '../'
import { ListCommandSchema } from '../zod_schemas'
import { sync as write } from '@plastichub/fs/write'
import { toYargs } from '@plastichub/osr-commons'
export const options = (yargs: Argv) => toYargs(yargs, ListCommandSchema)
// JSON-schema-style parameter spec attached to each tool definition.
interface FSParameters {
    type: string;
    properties: Record<string, any>;
    required: string[];
}
// Serializable summary of a single tool function.
interface FSDefinition {
    name: string;
    description: string;
    category: string;
    parameters: FSParameters;
}
// Shape of a category dump keyed by category name (here: fs).
interface FSData {
    fs: FSDefinition[];
}
/**
 * Render a tool's parameter list as "(a, ?b)" where optional parameters
 * (flagged `optional`, or absent from `required`) are prefixed with '?'.
 */
export const signature = (definition: FSDefinition): string => {
    const required = definition.parameters.required || [];
    const rendered: string[] = [];
    for (const [key, val] of Object.entries(definition.parameters.properties)) {
        // Mandatory only when listed in `required` and not flagged optional.
        const mandatory = required.includes(key) && !val.optional;
        rendered.push(mandatory ? key : `?${key}`);
    }
    return `(${rendered.join(", ")})`;
}
/**
 * Render one tool category as a markdown section:
 * "## <category>" followed by "- name(sig): description" bullets.
 */
export function format(category: string, data: any): string {
    const lines: string[] = [`## ${category}\n`];
    for (const definition of data) {
        lines.push(`- ${definition.name}${signature(definition)}: ${definition.description}`);
    }
    return lines.join("\n");
}
/**
 * CLI handler: enumerate every tool category and optionally write
 * a JSON dump (<output>) plus a condensed markdown listing (<output>.md).
 */
export const list = async (argv: any, options?: any) => {
    // Instantiate a category against cwd and project each tool to a
    // serializable summary (name, description, JSON-schema parameters).
    const getCategorizedTools = (category, options) => {
        const toolsArray = tools[category](process.cwd(), options);
        return toolsArray.map(tool => ({
            name: tool.function.name,
            description: tool.function.description,
            category,
            parameters: tool.function.parameters
        }));
    }
    const toolsList = {
        email: getCategorizedTools('email', options),
        search: getCategorizedTools('search', options),
        // BUG FIX: previously fetched 'email' a second time here, so the
        // interact tools were never listed.
        interact: getCategorizedTools('interact', options),
        fs: getCategorizedTools('fs', options),
        npm: getCategorizedTools('npm', options),
        git: getCategorizedTools('git', options),
        terminal: getCategorizedTools('terminal', options)
    }
    // Markdown rendering: one "## category" section per key.
    const shortDescription = Object.keys(toolsList).map((value: string) => {
        return format(value, toolsList[value])
    }).join('\n\n');
    if (argv.output) {
        write(argv.output, JSON.stringify(toolsList, null, 2))
        write(argv.output + '.md', shortDescription)
    }
}

View File

@ -0,0 +1,28 @@
import { generate_interfaces } from '@plastichub/osr-commons'
import {
FileListingOptionsSchema,
FileRemovalOptionsSchema,
GitCommitSchema,
GitRevertSchema,
GitSwitchVersionSchema,
InvokeToolSchema,
ToolListingOptionsSchema,
TerminalCommandSchema,
ListCommandSchema,
NpmRunSchema
} from '../zod_schemas'
/**
 * CLI handler: emit TypeScript interfaces for all public Zod schemas
 * into src/zod_types.ts via generate_interfaces.
 */
export const types = async () => {
    const schemas = [
        FileListingOptionsSchema,
        FileRemovalOptionsSchema,
        GitCommitSchema,
        GitRevertSchema,
        GitSwitchVersionSchema,
        InvokeToolSchema,
        ToolListingOptionsSchema,
        TerminalCommandSchema,
        ListCommandSchema,
        NpmRunSchema
    ]
    return generate_interfaces(schemas, 'src/zod_types.ts')
}

View File

@ -0,0 +1,11 @@
// Name used for the CLI-level logger.
export const LOGGER_NAME = 'llm-tools-cli';
// Glob patterns excluded from filesystem tool scans: dependency, build,
// and coverage directories, log files, and tool/VCS metadata.
export const EXCLUDE_GLOB = [
    "**/node_modules/**",
    "**/dist/**",
    "**/build/**",
    "**/coverage/**",
    "*.log",
    ".kbot",
    ".git"
]

View File

@ -0,0 +1,19 @@
import * as path from 'path'
import { Logger } from 'tslog'
import { IKBotTask } from './types'
import * as winston from 'winston'
import { createFileLogger, createLogger, winstonLogger, ELogTargets } from '@plastichub/osr-log'
export let logger: Logger<unknown> = createLogger('osr-ai-tools')
// tslog-based per-tool logger: console logging at options.logLevel plus a
// JSON file sink at <options.logs>/tools-<name>.json.
export const toolLoggerTS = (name, options: IKBotTask) => {
    let log = createLogger(name)
    log.settings.minLevel = options.logLevel
    log = createFileLogger(log, options.logLevel, path.join(options.logs, `tools-${name}.json`))
    return log
}
// winston-based per-tool logger writing to <options.logs>/tools-<name>.json
// (falls back to the current directory when options.logs is unset).
export const toolLogger = (name, options: IKBotTask = { logs: process.cwd() } as IKBotTask) => {
    const logPath = path.resolve(path.join(options.logs || './', `tools-${name}.json`))
    const log = winstonLogger(name, logPath, ELogTargets.Console)
    return log
}

View File

@ -0,0 +1,96 @@
import path from 'path'
import { Converter } from 'showdown'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { test as send } from '@plastichub/osr-mail/lib/nodemailer'
import { IKBotTask } from '../../types'
// Convert markdown to HTML via showdown, with table support enabled.
export const md2html = (content) => {
    const converter = new Converter({ tables: true });
    // NOTE(review): option value is the string 'true' (truthy) — kept as-is.
    converter.setOption('literalMidWordUnderscores', 'true');
    return converter.makeHtml(content);
}
// Build transport options for the 'newsletter' channel. A single recipient
// string is normalized into a one-element contact list.
// NOTE(review): `body` defaults to 'en', which looks like a language code
// rather than message content — confirm against callers.
const sendOptions = (recipient: string | string[] = 'cgoflyn@gmail.com', subject: string, body: string = 'en') => {
    const contacts = ([] as string[]).concat(recipient);
    return {
        from: 'PlasticHub <newsletter@osr-plastic.org>',
        transport: 'newsletter',
        subject,
        contacts,
        filter: true,
        query: null,
        to: null,
        source: body,
    };
};
import { toolLogger } from '../..'
/**
 * Email tool set: exposes a single `send_email` tool that renders a markdown
 * body to HTML, attaches the raw markdown, and sends via the newsletter
 * transport.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'send_email',
                description: 'Sends an email',
                parameters: {
                    type: 'object',
                    properties: {
                        recipient: {
                            type: ['string', 'array'],
                            items: {
                                type: 'string'
                            },
                            description: 'The email address of the recipient(s). Can be a single email or an array of emails. For "me", use the default email address',
                        },
                        subject: {
                            type: 'string',
                            description: 'the subject',
                            optional: true
                        },
                        body: {
                            type: 'string',
                            description: 'Markdown formatted body of the email',
                            optional: true
                        }
                    },
                    // BUG FIX: was ['url'], which is not a declared property;
                    // the only mandatory parameter is the recipient.
                    required: ['recipient']
                },
                function: async (params: any) => {
                    logger.debug(`Tool::EMail:Send ${params.recipient}`)
                    // Send to every recipient and collect results.
                    // BUG FIX: the previous version returned inside the loop,
                    // so only the first recipient ever received mail.
                    const sendMail = async (recipients: string | string[], subject: string, body: string, raw: string) => {
                        const recipientList = Array.isArray(recipients) ? recipients : [recipients];
                        const results: any[] = []
                        for (const recipient of recipientList) {
                            if (!recipient) {
                                logger.error(`Invalid contact : ${recipient}`)
                                continue;
                            }
                            const opts: any = {
                                ...options,
                                from: 'PlasticHub <newsletter@osr-plastic.org>',
                                subject: subject,
                                transport: "newsletter",
                                to: recipient,
                                html: body,
                                attachments: [
                                    {
                                        // Raw markdown attached alongside the rendered HTML body.
                                        content: raw,
                                        filename: 'body.md',
                                        contentDisposition: 'attachment',
                                    }
                                ],
                            }
                            results.push(await send(opts))
                        }
                        return results
                    }
                    // 'me' resolves to the configured default address, if any.
                    const recipient = params.recipient === 'me' ? options.variables?.DEFAULT_EMAIL || 'cgoflyn@gmail.com' : params.recipient
                    // BUG FIX: await the send so failures surface to the caller
                    // (previously fire-and-forget, always resolving undefined).
                    return sendMail(recipient, params.subject, md2html(params.body), params.body)
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ]
}

View File

@ -0,0 +1,367 @@
import * as path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { sync as rm } from '@plastichub/fs/remove'
//import { filesEx as glob } from '@plastichub/osr-commons/_glob'
import { isString } from '@plastichub/core/primitives'
import { sync as dir } from '@plastichub/fs/dir'
import { sync as write } from '@plastichub/fs/write'
import { sync as read } from '@plastichub/fs/read'
import { sync as rename } from '@plastichub/fs/rename'
import { sync as exists } from '@plastichub/fs/exists'
import { filesEx } from '@plastichub/osr-commons/_glob'
import { toolLogger } from '../..'
import { IKBotTask } from '../../types'
import { EXCLUDE_GLOB } from '../../constants'
import { glob, globSync, GlobOptions } from 'glob'
/**
 * Heuristic check for canonical Base64 text: non-empty, length a multiple
 * of four, restricted to the Base64 alphabet with at most two '=' padding
 * characters, and surviving a decode/re-encode round trip unchanged.
 */
const isBase64 = (str: string): boolean => {
    if (!str || str.length % 4 !== 0) {
        return false;
    }
    if (!/^[A-Za-z0-9+/]+={0,2}$/.test(str)) {
        return false;
    }
    // Only canonical Base64 re-encodes to the exact original string.
    try {
        return btoa(atob(str)) === str;
    } catch {
        return false;
    }
}
/**
 * Decode Base64 input to UTF-8 text; anything that does not look like
 * canonical Base64 is passed through untouched.
 */
export const decode_base64 = (base64: string): string => {
    try {
        return isBase64(base64)
            ? Buffer.from(base64, 'base64').toString('utf-8')
            : base64;
    } catch (error) {
        throw new Error('Failed to decode base64 string');
    }
};
/**
 * Filesystem tool set rooted at `target`. All relative paths supplied by the
 * model are resolved against `target`; content arrives Base64-encoded where
 * noted and is decoded via decode_base64.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger('fs', options)
    const category = 'fs'
    return [
        {
            type: 'function',
            function: {
                name: 'list_files',
                description: 'List all files in a directory',
                parameters: {
                    type: 'object',
                    properties: {
                        directory: { type: 'string' },
                        pattern: { type: 'string', optional: true }
                    },
                    required: ['directory']
                },
                function: async (params: any) => {
                    try {
                        const directory = path.join(target, params.directory);
                        if (!exists(directory)) {
                            logger.debug(`Tool::ListFiles Directory ${directory} does not exist`);
                            return []
                        }
                        const pattern = params.pattern || '**/*';
                        logger.debug(`Tool::ListFiles Listing files in ${directory} with pattern ${pattern}`);
                        // BUG FIX: EXCLUDE_GLOB entries were previously appended to
                        // the *positive* pattern list, turning the exclusions
                        // (node_modules, dist, ...) into include patterns. They
                        // belong in `ignore` only.
                        const ret = await glob(pattern, {
                            cwd: directory,
                            absolute: false,
                            ignore: EXCLUDE_GLOB
                        });
                        return ret
                    } catch (error) {
                        logger.error('Error listing files', error);
                        throw error;
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'read_files',
                description: 'Reads files in a directory with a given pattern',
                parameters: {
                    type: 'object',
                    properties: {
                        directory: { type: 'string' },
                        pattern: { type: 'string', optional: true }
                    },
                    required: ['directory']
                },
                // Returns [{ path, content }] with forward-slash relative paths;
                // unreadable or empty files are dropped.
                function: async (params: any) => {
                    try {
                        const pattern = params.pattern || '**/*';
                        let entries = filesEx(target, pattern);
                        let ret = entries.map((entry) => {
                            try {
                                let content = read(entry);
                                return {
                                    path: path.relative(target, entry).replace(/\\/g, '/'),
                                    content: content.toString()
                                }
                            } catch (error) {
                                logger.error(`Error reading file ${entry}:`, error)
                                return null
                            }
                        })
                        ret = ret.filter((entry) => (entry !== null && entry.content))
                        logger.debug(`Tool::ReadFiles Reading files in ${target} with pattern ${pattern} : ${ret.length} files`, ret.map((entry) => entry.path));
                        return ret
                    } catch (error) {
                        logger.error('Error listing files', error);
                        throw error;
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'remove_file',
                description: 'Remove a file at given path',
                parameters: {
                    type: 'object',
                    properties: {
                        path: { type: 'string' }
                    },
                    required: ['path']
                },
                function: async (params: any) => {
                    try {
                        const filePath = path.join(target, params.path);
                        logger.debug(`Tool::RemoveFile Removing file ${filePath}`);
                        rm(filePath);
                        return true;
                    } catch (error) {
                        logger.error('Error removing file', error);
                        throw error;
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'rename_file',
                description: 'Rename or move a file or directory',
                parameters: {
                    type: 'object',
                    properties: {
                        src: { type: 'string' },
                        dst: { type: 'string' }
                    },
                    // BUG FIX: was ['path'], which is not a declared property.
                    required: ['src', 'dst']
                },
                function: async (params: any) => {
                    try {
                        const src = path.join(target, params.src)
                        // NOTE(review): dst is used as given (not joined with
                        // target, unlike src) — confirm this asymmetry is intended.
                        logger.debug(`Tool::Rename file ${src} to ${params.dst}`)
                        rename(src, params.dst)
                        // BUG FIX: the source was removed unconditionally after the
                        // rename; only clean it up if it still exists (i.e. the
                        // rename behaved as a copy).
                        if (exists(src)) {
                            rm(src)
                        }
                        return true
                    } catch (error) {
                        // BUG FIX: message said 'removing' in the rename handler.
                        logger.error('Error renaming file', error)
                        throw error
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: "modify_project_files",
                // BUG FIX: unbalanced parenthesis in the description.
                description: "Create or modify existing project files in one shot (preferably used for creating project structure)",
                parameters: {
                    type: "object",
                    properties: {
                        files: {
                            type: "array",
                            items: {
                                type: "object",
                                properties: {
                                    path: { type: "string" },
                                    content: { type: "string", description: "base64 encoded string" }
                                },
                                required: ["path", "content"]
                            }
                        }
                    },
                    required: ["files"],
                },
                function: async (ret) => {
                    try {
                        if (!target) {
                            logger.error(`Tool::FS:modify_project_files : Root path required`)
                            return
                        }
                        let { files } = ret as any
                        // The model sometimes double-encodes the files array as a string.
                        if (isString(files)) {
                            try {
                                files = JSON.parse(files)
                            } catch (error: any) {
                                logger.error(`Tool::modify_project_files : Structure Error parsing files`, error, ret)
                                // Keep the rejected payload around for debugging.
                                write(path.join(target, 'tools-output.json'), files)
                                return error.message
                            }
                        }
                        for (const file of files) {
                            const filePath = path.join(target, file.path);
                            logger.debug(`Tool:modify_project_files writing file ${filePath}`)
                            try {
                                let content = decode_base64(file.content)
                                await write(filePath, content)
                            } catch (error) {
                                logger.error(`Tool:modify_project_files Error writing file`, error)
                            }
                        }
                    } catch (error) {
                        logger.error(`Error creating project structure`, error)
                    }
                },
                parse: JSON.parse,
            },
        } as RunnableToolFunction<{ id: string }>,
        {
            type: 'function',
            function: {
                name: "write_file",
                description: "Writes to a file, given a path and content (base64). No directory or file exists check needed!",
                parameters: {
                    type: "object",
                    properties: {
                        file: {
                            type: "object",
                            properties: {
                                path: { type: "string" },
                                content: { type: "string", description: "base64 encoded string" }
                            }
                        }
                    },
                    required: ["file"],
                },
                function: async (params) => {
                    try {
                        // Tolerate a double-encoded argument payload.
                        if (isString(params)) {
                            try {
                                params = JSON.parse(params)
                            } catch (error: any) {
                                logger.error(`Tool::create_file : Structure Error parsing files`, error, params)
                                return error.message
                            }
                        }
                        let { file } = params as any
                        if (!target || !file.path || !file.content) {
                            logger.error(`Tool::create_file : Path/Target/Content are required to create file`, params)
                            return
                        }
                        let content = decode_base64(file.content)
                        logger.debug(`Tool::create_file Writing file ${file.path} in ${target}`)
                        const filePath = path.join(target, file.path)
                        write(filePath, content)
                        return true
                    } catch (error) {
                        logger.error(`Tool:create_file Error writing file`, error)
                        return false
                    }
                },
                parse: JSON.parse,
            },
        } as RunnableToolFunction<{ id: string }>,
        {
            type: 'function',
            function: {
                name: "file_exists",
                description: "check if a file or folder exists",
                parameters: {
                    type: "object",
                    properties: {
                        file: {
                            type: "object",
                            properties: {
                                path: { type: "string" }
                            }
                        }
                    },
                    required: ["file"],
                },
                function: async (ret) => {
                    try {
                        if (isString(ret)) {
                            try {
                                ret = JSON.parse(ret)
                            } catch (error: any) {
                                logger.error(`Tool::file_exists : Structure Error parsing files`, error, ret)
                                return error.message
                            }
                        }
                        const { file } = ret as any
                        if (!target || !file.path) {
                            logger.error(`Tool::file_exists : Path is required to `, ret)
                            return
                        }
                        const filePath = path.join(target, file.path)
                        const res = exists(filePath)
                        logger.debug(`Tool::file_exists ${filePath} exists: ${res}`)
                        return res ? true : false
                    } catch (error) {
                        logger.error(`Tool:file_exists error`, error)
                        return false
                    }
                },
                parse: JSON.parse,
            },
        } as RunnableToolFunction<{ id: string }>,
        {
            type: 'function',
            function: {
                name: "read_file",
                description: "read a file, at given a path",
                parameters: {
                    type: "object",
                    properties: {
                        file: {
                            type: "object",
                            properties: {
                                path: { type: "string" }
                            }
                        }
                    },
                    required: ["file"],
                },
                function: async (ret) => {
                    try {
                        const { file } = ret as any
                        const filePath = path.join(target, file.path)
                        logger.debug(`Tool::ReadFile Reading file ${filePath}`)
                        return read(filePath, 'string')
                    } catch (error) {
                        logger.error(`Error reading file`, error)
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<{ id: string }>
    ]
};

View File

@ -0,0 +1,153 @@
import * as path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import simpleGit, { SimpleGit } from 'simple-git'
import { sync as exists } from '@plastichub/fs/exists'
import { substitute } from '@plastichub/osr-commons'
import { logger } from '../../'
import { IKBotTask } from '../../types'
import { toolLogger } from '../..'
import { sync as findUp } from 'find-up'
/**
 * Stage, commit and push the given files in targetDirectory, initializing
 * the repository and its origin remote on first use.
 * Returns true on successful push, false on push failure, undefined when
 * there was nothing to commit; rethrows unexpected errors.
 */
const commitFiles = async (filePaths: string[], commitMessage: string, targetDirectory: string, variables: Record<string, string> = {}) => {
    try {
        if (!filePaths || !filePaths.length) {
            logger.warn(`No files to commit`)
            return
        }
        // Lazily initialize the repository on first commit.
        if (!exists(path.join(targetDirectory, '.git'))) {
            try {
                logger.info(`Initializing repository at ${targetDirectory}`)
                await initRepository(targetDirectory)
            } catch (e: any) {
                logger.error(`Error initializing repository at ${targetDirectory} `, e.message, filePaths)
            }
        }
        const git: SimpleGit = simpleGit(targetDirectory);
        try {
            await git.add(filePaths);
        } catch (e: any) {
            logger.error('Error adding files:', e.message, filePaths);
        }
        await git.commit(commitMessage)
        try {
            await git.raw(['branch', '-M', 'master']);
            // Remote URL is assembled from configuration variables
            // (GIT_REPO, GIT_USER) plus the directory basename.
            const repo = substitute(false, "${GIT_REPO}/${GIT_USER}/${REPO_NAME}.git", {
                REPO_NAME: path.basename(targetDirectory),
                ...variables
            })
            await git.raw(['remote', 'add', 'origin', repo])
            await git.push(['--set-upstream', 'origin', 'master'])
            return true
        } catch (e: any) {
            if (e.message.includes('remote origin already exists')) {
                // Remote was configured on a previous run; just push.
                await git.push(['--set-upstream', 'origin', 'master']);
                // BUG FIX: previously fell through and returned undefined even
                // though the push succeeded.
                return true
            }
            logger.error('Tools::GIT : Error pushing files:', e.message, filePaths);
            return false
        }
    } catch (error: any) {
        logger.error('Error committing files:', error.message, filePaths);
        throw error
    }
}
/**
 * Initialize a git repository at targetDirectory unless one already exists.
 * Returns true only when a new repository was created; false when it already
 * existed or on failure. (`variables` is currently unused; kept for interface
 * compatibility with callers.)
 */
const initRepository = async (targetDirectory: string, variables: Record<string, string> = {}): Promise<boolean> => {
    try {
        if (exists(path.join(targetDirectory, '.git'))) {
            logger.info('Git repository already exists');
            return false
        }
        const git: SimpleGit = simpleGit(targetDirectory);
        await git.init();
        logger.info('Git repository initialized successfully!');
        return true;
    } catch (error: any) {
        logger.error('Error initializing git repository:', error.message);
        return false
    }
}
/**
 * Git tool set rooted at `target`. Returns an empty set when the target
 * directory is missing, since every git operation requires it.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    if (!target) {
        logger.warn(`Tools:GIT : Target is required`)
        return []
    }
    if (!exists(target)) {
        // BUG FIX: garbled log message ("doesnt exists").
        logger.warn(`Tools:GIT : Project path doesn't exist ${target}`)
        return []
    }
    return [
        {
            type: 'function',
            function: {
                name: "init_repository",
                description: "Initialize a new git repository",
                parameters: {
                    type: "object",
                    properties: {},
                    required: []
                },
                // Initializes a repository at `target` unless one already exists
                // in the directory or any ancestor (located via find-up).
                function: async (params) => {
                    logger.info(`Tool::init_repository Init Repository in ${target}`)
                    const gitDir = findUp('.git', { type: 'directory', cwd: target })
                    if (gitDir && exists(gitDir)) {
                        logger.info(`Repository already exists at ${gitDir}`)
                        return true
                    }
                    try {
                        await initRepository(target, options.variables)
                        return true
                    } catch (error) {
                        logger.error(`Error initializing repository`, error)
                        return false;
                    }
                },
                parse: JSON.parse,
            }
        } as RunnableToolFunction<{ id: string }>,
        {
            type: 'function',
            function: {
                name: "commit_files_git",
                description: "Commit files using git",
                parameters: {
                    type: "object",
                    properties: {
                        files: {
                            type: "array",
                            items: {
                                type: "string"
                            }
                        },
                        message: {
                            type: "string"
                        }
                    },
                    required: ["files"],
                },
                // BUG FIX: removed stray `debugger` statement left in production code.
                function: async (ret) => {
                    try {
                        const { files, message } = ret as any
                        logger.info(`Tool::GIT Commit files ${files} in ${target}`)
                        if (!target) {
                            logger.error(`Tool::Git Commit : Target is required`)
                            return
                        }
                        if (!exists(target)) {
                            // BUG FIX: garbled log message ("Project doesnt path exists").
                            logger.error(`Project path doesn't exist ${target}`)
                            return
                        }
                        await commitFiles(files, message, target, options.variables)
                    } catch (error: any) {
                        logger.error(`Error committing dependencies : ${error.message}`)
                    }
                    return true
                },
                parse: JSON.parse,
            },
        } as RunnableToolFunction<{ id: string }>
    ]
}

View File

@ -0,0 +1,31 @@
import { RunnableToolFunctionWithParse } from 'openai/lib/RunnableFunction'
import { JSONSchema } from 'openai/lib/jsonschema'
import { ZodSchema, z } from 'zod'
import { zodToJsonSchema } from 'zod-to-json-schema'
// see https://github.com/openai/openai-node/blob/master/examples/tool-call-helpers-zod.ts
/**
 * Adapt a zod-typed async function into an OpenAI runnable tool: the zod
 * schema doubles as the JSON-schema parameter spec (via zod-to-json-schema)
 * and as the runtime validator for the model-provided arguments.
 * see https://github.com/openai/openai-node/blob/master/examples/tool-call-helpers-zod.ts
 */
export const zodFunction = <T extends object>({
    function: fn,
    schema,
    description = '',
    name,
}: {
    function: (args: T) => Promise<object>
    schema: ZodSchema<T>
    description?: string
    name?: string
}): RunnableToolFunctionWithParse<T> => {
    // Parse the raw JSON argument string, then validate it against the schema.
    const parse = (input: string): T => schema.parse(JSON.parse(input))
    return {
        type: 'function',
        function: {
            function: fn,
            name: name ?? fn.name,
            description: description,
            parameters: zodToJsonSchema(schema) as JSONSchema,
            parse,
        },
    }
}

View File

@ -0,0 +1,94 @@
import * as path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
const inquirer = require('inquirer').default
import { toolLogger } from '../..'
import { IKBotTask } from '../../types'
/**
 * Interactive user tool set: prompt the user on the terminal via inquirer.
 * Both tools resolve to { response: ... } or null on prompt failure.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    // Run a single inquirer prompt and pick the named answer field.
    const prompt = async (question: any, answerKey: string) => {
        const answers = await inquirer.prompt([question])
        return { response: answers[answerKey] }
    }
    return [
        {
            type: 'function',
            function: {
                name: 'ask_question',
                description: 'Ask user a simple question and get response',
                parameters: {
                    type: 'object',
                    properties: {
                        question: {
                            type: 'string',
                            description: 'Question to ask the user'
                        },
                        default: {
                            type: 'string',
                            description: 'Default answer',
                            optional: true
                        }
                    },
                    required: ['question']
                },
                function: async (params: any) => {
                    try {
                        return await prompt({
                            type: 'input',
                            name: 'response',
                            message: params.question,
                            default: params.default
                        }, 'response')
                    } catch (error: any) {
                        logger.error('Error asking question:', error.message);
                        return null;
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'choose_option',
                description: 'Ask user to choose from multiple options',
                parameters: {
                    type: 'object',
                    properties: {
                        message: {
                            type: 'string',
                            description: 'Message to show the user'
                        },
                        choices: {
                            type: 'array',
                            items: { type: 'string' },
                            description: 'List of choices'
                        },
                        multiple: {
                            type: 'boolean',
                            description: 'Allow multiple selections',
                            optional: true
                        }
                    },
                    required: ['message', 'choices']
                },
                function: async (params: any) => {
                    try {
                        // checkbox allows multi-select; list is single-select.
                        return await prompt({
                            type: params.multiple ? 'checkbox' : 'list',
                            name: 'selection',
                            message: params.message,
                            choices: params.choices
                        }, 'selection')
                    } catch (error: any) {
                        logger.error('Error in choice selection:', error.message);
                        return null;
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ]
}

View File

@ -0,0 +1,151 @@
import path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { exec } from 'child_process'
import { promisify } from 'util'
import { logger } from '../..'
import pMap from "p-map"
import { sync as exists } from '@plastichub/fs/exists'
import { IKBotTask } from '../../types'
import { toolLogger } from '../..'
const execAsync = promisify(exec)
/**
 * Install a single dependency via `pnpm add` inside `directory`.
 * Resolves false on failure, true on success; never rejects.
 */
const install = async (dependency: string, directory: string): Promise<any> => {
    return new Promise((resolve, reject) => {
        const command = `pnpm add ${dependency} --dir ${directory}`
        exec(command, (error, stdout, stderr) => {
            if (error) {
                logger.error(`Error installing ${dependency}:`, error.message)
                return resolve(false)
            }
            logger.info(`Successfully installed "${dependency}" in "${directory}".`)
            // BUG FIX: the promise previously never resolved on success, which
            // stalled the sequential pMap loop in install_dependency forever.
            resolve(true)
        })
    })
}
// npm/pnpm tool set rooted at `target`: build the project, run arbitrary
// pnpm commands, and install dependencies sequentially.
export const tools = (target: string, options: IKBotTask): Array<any> => {
    // Per-tool logger named after this source file (shadows the module logger).
    const logger = toolLogger(path.parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'build_project',
                description: 'Build project using pnpm build command',
                parameters: {
                    type: 'object',
                    properties: {},
                    required: []
                },
                // Runs `pnpm build` in the target directory.
                // NOTE(review): success is derived from stderr being empty —
                // tools that emit warnings on stderr will be reported as
                // failed even when the build succeeded; confirm intended.
                function: async () => {
                    try {
                        logger.debug(`Tool::BuildProject Building project at ${target}`);
                        const { stdout, stderr } = await execAsync('pnpm build', {
                            cwd: target
                        });
                        return {
                            success: !stderr,
                            output: stdout,
                            error: stderr || null
                        };
                    } catch (error: any) {
                        logger.error('Error building project', error);
                        return {
                            success: false,
                            output: null,
                            error: error.message
                        };
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'run_npm',
                description: 'Run an npm/pnpm command',
                parameters: {
                    type: 'object',
                    properties: {
                        command: { type: 'string', description: 'Command to run (e.g. install, test, etc)' },
                        args: {
                            type: 'array',
                            items: { type: 'string' },
                            description: 'Additional arguments for the command',
                            optional: true
                        }
                    },
                    required: ['command']
                },
                // Builds "pnpm <command> <args...>" and runs it in the target
                // directory; same stderr-based success heuristic as above.
                function: async (params: any) => {
                    try {
                        const args = params.args ? params.args.join(' ') : '';
                        const fullCommand = `pnpm ${params.command} ${args}`.trim();
                        logger.debug(`Tool::RunNpm Running command: ${fullCommand}`);
                        const { stdout, stderr } = await execAsync(fullCommand, {
                            cwd: target
                        });
                        return {
                            success: !stderr,
                            output: stdout,
                            error: stderr || null
                        };
                    } catch (error: any) {
                        logger.error('Error running npm command', error);
                        return {
                            success: false,
                            output: null,
                            error: error.message
                        };
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: "install_dependency",
                description: "Install a dependency using npm",
                parameters: {
                    type: "object",
                    properties: {
                        dependencies: {
                            type: "array",
                            items: {
                                type: "string"
                            }
                        }
                    },
                    required: ["dependencies"],
                },
                // Installs each dependency one at a time (concurrency: 1) via
                // the module-level `install` helper.
                function: async (ret) => {
                    try {
                        const { dependencies } = ret as any
                        if (!target) {
                            logger.error(`Tool::NPM Target is required to install dependencies`)
                            return
                        }
                        if (!exists(target)) {
                            logger.error(`Project doesnt path exists ${target}`)
                            return
                        }
                        await pMap(dependencies, (async (dependency: string) => {
                            logger.info(`Installing dependency`, dependency)
                            try {
                                return install(dependency, target)
                            } catch (error) {
                                logger.error(`Error installing dependency ${dependency} `, error)
                            }
                        }), {
                            concurrency: 1
                        })
                    } catch (error) {
                        logger.error(`Error installing dependencies`, error)
                    }
                },
                parse: JSON.parse,
            }
        } as RunnableToolFunction<{ id: string }>
    ]
}

View File

@ -0,0 +1,130 @@
import { logger } from '../../index'
import * as stream from 'stream'
import { ChildProcess, spawn } from 'child_process'
// Exit classification carried in the result objects resolved by `hook`.
export enum STATUS {
    OK,
    ERROR,
    PENDING
}
// Route git-style fatal lines ("fatal: ...") to the supplied sink, indented
// with a tab; returns whether the message was fatal.
const fatalHandler = (message: string, fn: (msg: string) => void): boolean => {
    const isFatal = message.startsWith('fatal:')
    if (isFatal) {
        fn('\t ' + message)
    }
    return isFatal
}
// Keep only meaningful output lines: drop empty strings, bare line
// terminators, and Node debugger/TLS chatter.
const defaultFilter = (message: string): boolean => {
    if (message.length === 0) return false
    if (message === '\n' || message === '\r' || message === '\r\n') return false
    if (message.startsWith('Debugger attached')) return false
    if (message.includes('NODE_TLS_REJECT_UNAUTHORIZED')) return false
    if (message.includes('Waiting for the debugger to disconnect')) return false
    return true
}
// Attach listeners to a readable stream: log 'message'/'error' events and
// forward raw 'data' chunks straight to stdout. Capturing into `collector`
// is currently disabled (commented out), so the buffer is not populated here.
const subscribe = (signal: stream.Readable, collector: (data: any) => void = () => { }) => {
    // Guard against missing/non-stream arguments (e.g. stdio: 'inherit').
    if (!signal || !signal.on) {
        return
    }
    signal.on('message', (message) => logger.debug('message', message))
    signal.on('error', (error) => logger.error('std-error', error))
    signal.on('data', (data) => {
        /*
        const msg = data.toString().replace(ansiRegex(), "")
        if (!defaultFilter(msg)) {
            return
        }
        collector(msg)*/
        process.stdout.write(data)
    })
}
const merge = (buffer: string[], data: any): string[] => buffer.concat(data);
/**
 * Wire a child process's 'exit' event to the given promise callbacks.
 * Always resolves (never rejects): a non-zero exit code yields a
 * STATUS.ERROR result carrying the code, otherwise STATUS.OK.
 * Collected output lines (if any) are returned in `messages`.
 */
const hook = (child: ChildProcess, resolve: any, reject: any, cmd: string, buffer: string[] = []) => {
    // BUG FIX: removed stray `debugger` statements and the unused local
    // collector left over from the disabled stream-capture path.
    child.on('exit', (code, signal) => {
        if (code) {
            resolve({
                code: STATUS.ERROR,
                command: cmd,
                error: code,
                messages: buffer
            })
        } else {
            resolve({
                code: STATUS.OK,
                command: cmd,
                messages: buffer
            })
        }
    })
    return child
}
/**
 * Thin wrapper around child_process.spawn that runs a shell command with
 * inherited stdio and resolves with the STATUS result produced by `hook`.
 */
export class Process {
    // Shell binary to invoke (e.g. 'git'); command and args are appended in exec().
    public binary = ''
    public cwd: string = ''
    public args: string = ''
    public buffer: string[] = []
    constructor(options: any = {}) {
        this.binary = options.binary || this.binary
        this.cwd = options.cwd || process.cwd()
        this.buffer = options.buffer || []
    }
    /**
     * Run `<binary> <command> <args...>` through the shell.
     * Resolves with { code, command, messages[, error] } on process exit.
     */
    public async exec(command: string, args: string[] = []): Promise<any> {
        args = [command].concat(args)
        try {
            const cmd = `${this.binary} ${args.join(' ')}`
            try {
                const p = new Promise<any>((resolve, reject) => {
                    // BUG FIX: `cmd` already embeds the arguments; the previous
                    // call passed `args` to spawn() as well, so every argument
                    // appeared twice on the resulting shell command line.
                    // Also removed stray `debugger` statements.
                    const cp = spawn(cmd, [], {
                        cwd: this.cwd,
                        shell: true,
                        stdio: 'inherit',
                        env: {
                            ...process.env
                        },
                    })
                    return hook(cp, resolve, reject, cmd, this.buffer)
                })
                return p
            } catch (e) {
                logger.error('Error executing command', e)
            }
        } catch (e) {
            logger.error('Error executing command', e)
        }
    }
}
export class Helper {
    /**
     * Convenience wrapper: run `cmd args...` in `cwd` via a throwaway Process,
     * optionally logging the invocation when debug_stream is set.
     */
    public static async run(cwd, cmd: string, args: string[], buffer: string[] = [], debug_stream: boolean = false): Promise<any> {
        if (debug_stream) {
            logger.info(`Run ${cmd} in ${cwd}`, args)
        }
        const proc = new Process({
            cwd,
            binary: cmd,
            buffer
        })
        return proc.exec('', args)
    }
}

View File

@ -0,0 +1,102 @@
import * as path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { isArray } from '@plastichub/core/primitives'
import { sync as write } from '@plastichub/fs/write'
import { CONFIG_DEFAULT } from '@plastichub/osr-commons'
import { toolLogger } from '../..'
import { IKBotTask } from '../../types'
// Web-search tool set: Google Custom Search and SerpAPI lookups. Both tools
// read API credentials from the default OSR config and return a JSON string
// of result summaries (undefined when no credentials are configured).
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'google',
                description: 'Searches Google for the given query',
                parameters: {
                    type: 'object',
                    properties: {
                        query: { type: 'string' }
                    },
                    required: ['query']
                },
                function: async (params: any) => {
                    const { query } = params
                    // Credentials: google.api_key + google.cse from the OSR config.
                    const config = CONFIG_DEFAULT() as any
                    let apiKey = config?.google?.api_key
                    let cse = config?.google?.cse
                    if (!config || !apiKey || !cse) {
                        logger.debug(
                            "Config not found in $HOME/.osr/config.json. " +
                            "Optionally, export OSR_CONFIG with the path to the configuration file " +
                            ""
                        );
                        return undefined
                    }
                    // NOTE(review): unlike the serpapi tool below, this fetch does
                    // not check res.ok before parsing — confirm intended.
                    const res = await fetch(
                        `https://www.googleapis.com/customsearch/v1?key=${apiKey}&cx=${cse}&q=${encodeURIComponent(
                            query
                        )}`
                    )
                    const data = await res.json();
                    // `...item` spread after the picks means the raw item fields win.
                    let results =
                        data.items?.map((item: { title?: string; link?: string; snippet?: string }) => ({
                            title: item.title,
                            link: item.link,
                            snippet: item.snippet,
                            ...item
                        })) ?? [];
                    return JSON.stringify(results)
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>,
        {
            type: 'function',
            function: {
                name: 'serpapi',
                description: 'Searches Serpapi (finds locations (engine:google_local), places on the map (engine:google_maps) ) for the given query',
                parameters: {
                    type: 'object',
                    properties: {
                        query: { type: 'string' },
                        engine: { type: 'string', default: 'google' },
                    },
                    required: ['query']
                },
                function: async (params: any) => {
                    const { query, engine } = params
                    // Accept either serpapi.key or serpapi.api_key from config.
                    const config = CONFIG_DEFAULT() as any
                    let apiKey = config?.serpapi?.key || config?.serpapi?.api_key
                    if (!config || !apiKey) {
                        logger.debug(
                            "Config not found in $HOME/.osr/config.json. " +
                            "Optionally, export OSR_CONFIG with the path to the configuration file " +
                            ""
                        );
                        return undefined
                    }
                    const url = `https://serpapi.com/search?api_key=${apiKey}&engine=${engine || 'google'}&q=${encodeURIComponent(query)}&google_domain=google.com`
                    const res = await fetch(url)
                    logger.debug(`Searching ${url}`)
                    if (!res.ok) {
                        throw new Error(`HTTP error! status: ${res.status}`);
                    }
                    const data = await res.json()
                    // Result set location varies by engine; normalize to an array.
                    let items = data.organic_results || data.local_results || data.place_results || data.places || data.maps_results || []
                    if (items && !isArray(items)) {
                        items = [items]
                    }
                    let results = items.map((item: any) => ({
                        title: item.title,
                        link: item.link,
                        snippet: item.snippet,
                        ...item
                    })) ?? []
                    return JSON.stringify(results)
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ]
};

View File

@ -0,0 +1,122 @@
import * as path from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { exec, spawn } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
import { toolLogger } from '../..'
import { IKBotTask } from '../../types'
import { Process, Helper } from './process'
/**
 * Terminal tool: execute a shell command relative to `target`, either
 * blocking (default), in the background, detached, or in a new terminal window.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'execute_command',
                description: 'Execute a terminal command and capture output',
                parameters: {
                    type: 'object',
                    properties: {
                        command: {
                            type: 'string',
                            description: 'Command to execute'
                        },
                        args: {
                            type: 'array',
                            items: { type: 'string' },
                            description: 'Command arguments',
                            optional: true
                        },
                        cwd: {
                            type: 'string',
                            description: 'Working directory for command execution',
                            optional: true
                        },
                        background: {
                            type: 'boolean',
                            description: 'Run command in background (non-blocking)',
                            optional: true,
                            default: false
                        },
                        window: {
                            type: 'boolean',
                            description: 'Open command in new terminal window',
                            optional: true,
                            default: false
                        },
                        detached: {
                            type: 'boolean',
                            description: 'Run process detached from parent',
                            optional: true,
                            default: false
                        }
                    },
                    required: ['command']
                },
                /**
                 * Dispatch order:
                 * - window: launch in a new terminal window (fire and forget)
                 * - background/detached: spawn without waiting, return the PID
                 * - otherwise: run to completion via Helper.run and return its result
                 */
                function: async (params: any) => {
                    try {
                        const cwd = params.cwd ? path.join(target, params.cwd) : target;
                        const args = params.args || [];
                        logger.debug(`Tool::Terminal : ExecuteCommand Running '${params.command}' in ${cwd}`, params)
                        // FIX: this branch previously tested `params.detached`, which made
                        // the documented `window` option dead code and prevented `detached`
                        // from ever reaching the detached-spawn path below.
                        if (params.window) {
                            const isWindows = process.platform === 'win32';
                            if (isWindows) {
                                spawn('cmd', ['/c', 'start', 'cmd', '/k', params.command, ...args], {
                                    cwd: cwd,
                                    detached: true,
                                    stdio: 'ignore'
                                });
                            } else {
                                // For macOS/Linux
                                spawn('x-terminal-emulator', ['-e', `${params.command} ${args.join(' ')}`], {
                                    cwd: cwd,
                                    detached: true,
                                    stdio: 'ignore'
                                });
                            }
                            return {
                                success: true,
                                output: 'Command launched in new window',
                                error: null
                            };
                        }
                        if (params.background || params.detached) {
                            const child = spawn(params.command, args, {
                                cwd: cwd,
                                detached: params.detached === true,
                                stdio: 'ignore'
                            });
                            if (params.detached) {
                                // Allow the parent to exit independently of the child.
                                child.unref();
                            }
                            return {
                                success: true,
                                output: `Process started with PID: ${child.pid}`,
                                error: null
                            };
                        }
                        const cmd = `${params.command} ${args.join(' ')}`.trim();
                        logger.debug(`Tool::ExecuteCommand Running '${cmd}' in ${cwd}`);
                        // `collector` is passed as Helper.run's output buffer.
                        const collector = []
                        const ret = await Helper.run(cwd, cmd, [], collector, true)
                        return ret
                    } catch (error: any) {
                        logger.error('Error executing command', error);
                        return {
                            success: false,
                            output: null,
                            error: error.message
                        };
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ]
}

View File

@ -0,0 +1,21 @@
import { tools as fsTools } from './fs'
import { tools as npmTools } from './npm'
import { tools as gitTools } from './git'
import { tools as terminalTools } from './terminal'
import { tools as interactTools } from './interact'
import { tools as userTools } from './user'
import { tools as search } from './search'
import { tools as webTools } from './web'
import { tools as emailTools } from './email'
// Registry of all built-in tool categories, keyed by the category name
// used to enable/disable them (e.g. via --disable=fs,git,...).
export const tools = {
    fs: fsTools,
    npm: npmTools,
    git: gitTools,
    terminal: terminalTools,
    interact: interactTools,
    user: userTools,
    search: search,
    web: webTools,
    email: emailTools
}

View File

@ -0,0 +1,80 @@
import { parse, join } from 'path'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import { sync as write } from '@plastichub/fs/write'
import * as fs from 'fs'
import { lookup } from 'mime-types'
import { _ } from 'inquirer/dist/commonjs/ui/prompt'
import { IKBotTask } from '../../types'
import { toolLogger } from '../..'
/** Resolve the MIME type for `file`; null when the path has no extension. */
export const mime = (file: string = '') => {
    const { ext } = parse(file)
    return ext ? lookup(file) : null
}
const screenshot = require('screenshot-desktop')
/**
 * Read a file and encode it as a `data:` URI string.
 * Returns null (and logs to console) when the file cannot be read
 * or its MIME type cannot be determined from the path.
 */
export const fileToBase64 = (filePath: string): string | null => {
    try {
        const contents = fs.readFileSync(filePath)
        const mimeType = lookup(filePath)
        if (!mimeType) {
            throw new Error('Unable to determine MIME type.')
        }
        return `data:${mimeType};base64,${contents.toString('base64')}`
    } catch (error) {
        console.error('fileToBase64 : Error reading file:', error)
        return null
    }
}
/**
 * Screen-capture tool: take a desktop screenshot and save it under `target`.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'capture_screen',
                description: 'Capture a screenshot and store it as file (jpg). Returns the path to the file',
                parameters: {
                    type: 'object',
                    properties: {
                        file: { type: 'string' }
                    },
                    required: ['file']
                },
                /**
                 * Captures the desktop as a jpg, writes it to target/params.file and
                 * returns { success: true, path }. On failure: { success: false, error }.
                 */
                function: async (params: any) => {
                    try {
                        const outputPath = join(target, params.file)
                        // screenshot-desktop already returns a promise of the image
                        // buffer; the previous explicit Promise wrapper was redundant.
                        const img = await screenshot({ format: 'jpg' })
                        write(outputPath, img)
                        // FIX: return the file path as the tool description promises.
                        // Previously this returned an empty user message (the image_url
                        // content part was commented out), so callers got nothing useful.
                        return { success: true, path: outputPath }
                    } catch (error: any) {
                        logger.error('Error capturing screenshot:', error);
                        return {
                            success: false,
                            error: error.message
                        };
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ];
};

View File

@ -0,0 +1,111 @@
import * as path from 'path'
import { sync as write } from '@plastichub/fs/write'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'
import puppeteer from 'puppeteer'
import TurndownService from 'turndown'
const turndown = new TurndownService()
import { toolLogger } from '../..'
import { IKBotTask } from '../../types'
/**
 * Web tool: open a page in headless Chromium and return its cleaned content
 * as markdown plus extracted links, images and the og:image.
 */
export const tools = (target: string, options: IKBotTask): Array<any> => {
    const logger = toolLogger(path.parse(__filename).name, options)
    return [
        {
            type: 'function',
            function: {
                name: 'browse_page',
                description: 'Browse a webpage and return its content as markdown, all links, images and pages main image',
                parameters: {
                    type: 'object',
                    properties: {
                        url: {
                            type: 'string',
                            description: 'URL of the webpage to browse'
                        }
                    },
                    required: ['url']
                },
                /**
                 * Loads `params.url`, strips non-content nodes in-page, and returns
                 * { success, markdown, links, images, mainImage, url }.
                 * On failure returns { success: false, error, url } instead of throwing.
                 */
                function: async (params: any) => {
                    try {
                        logger.debug(`Tool::BrowsePage Browsing ${params.url}`);
                        const browser = await puppeteer.launch({
                            headless: true,
                            args: ['--no-sandbox', '--disable-setuid-sandbox']
                        })
                        // FIX: close the browser in `finally` so it cannot leak;
                        // previously close() was duplicated on success and error paths.
                        try {
                            const page = await browser.newPage()
                            logger.debug(`Tool::Web::BrowsePage Opening page ${params.url}`)
                            await page.goto(params.url, {
                                waitUntil: 'networkidle2'
                            })
                            // Runs in the page context: remove non-content elements,
                            // then collect links, images and the og:image.
                            const pageData = await page.evaluate(() => {
                                const elementsToRemove = document.querySelectorAll(
                                    'script, style, link, meta, noscript, iframe, [style*="display:none"],[style*="display: none"], .hidden'
                                )
                                elementsToRemove.forEach(el => el.remove())
                                // First 20 absolute links with their visible text.
                                const links = Array.from(document.querySelectorAll('a'))
                                    .map(a => ({
                                        text: a.textContent?.trim() || '',
                                        href: a.href
                                    }))
                                    .filter(link => link.href && link.href.startsWith('http'))
                                    .slice(0, 20)
                                // First 20 absolute image URLs with dimensions.
                                const images = Array.from(document.querySelectorAll('img'))
                                    .map(img => ({
                                        src: img.src,
                                        alt: img.alt || '',
                                        width: img.width,
                                        height: img.height
                                    }))
                                    .filter(img => img.src && img.src.startsWith('http'))
                                    .slice(0, 20)
                                const mainImage = document.querySelector('meta[property="og:image"]')?.getAttribute('content') ||
                                    document.querySelector('meta[name="og:image"]')?.getAttribute('content')
                                const body = document.body
                                return {
                                    content: body ? body.innerHTML : '',
                                    links,
                                    images,
                                    ogImage: mainImage
                                }
                            })
                            const markdown = turndown.turndown(pageData.content)
                            return {
                                success: true,
                                markdown: markdown,
                                links: pageData.links,
                                images: pageData.images,
                                mainImage: pageData.ogImage,
                                url: params.url
                            };
                        } finally {
                            await browser.close()
                        }
                    } catch (error: any) {
                        logger.debug('Error browsing page:', error.message);
                        return {
                            success: false,
                            error: error.message,
                            url: params.url
                        };
                    }
                },
                parse: JSON.parse
            }
        } as RunnableToolFunction<any>
    ]
}

View File

@ -0,0 +1,15 @@
#!/usr/bin/env node
import yargs from 'yargs';
import { commands } from './commands';
import { logger } from './';
/**
 * CLI entry point: builds the yargs command tree and parses process argv.
 * yargs executes the matched command handler as part of parsing.
 */
async function main() {
    try {
        // FIX: the previous `const argv = ...` binding was never used;
        // accessing `.argv` is what triggers parsing and command execution.
        await commands(yargs).argv;
    } catch (error) {
        logger.error('Error executing command:', error);
        process.exit(1);
    }
}
// `void` marks the deliberately-unawaited top-level promise;
// main() handles its own errors and exit code.
void main();

View File

@ -0,0 +1,24 @@
import { ChatCompletion, ChatCompletionMessage, ChatCompletionMessageParam } from 'openai/resources'
import { IKBotOptions } from './types_kbot'
import OpenAI from 'openai'
import { RunnableToolFunction, RunnableFunctionWithParse } from 'openai/lib/RunnableFunction'
/** Hook invoked before a tool function runs; receives the tool context and its parsed args. */
export type onToolBefore = (ctx: RunnableFunctionWithParse<any>,args: any) => Promise<any>
/** Hook invoked after a tool function ran; additionally receives the tool's result. */
export type onToolAfter = (ctx: RunnableFunctionWithParse<any>, args: any, result?: any) => Promise<any>
/** Observer interface receiving lifecycle events of a chat-completion run. */
export interface ICollector {
    //OpenAI
    /** Called for each message appended to the conversation. */
    onMessage: (message: ChatCompletionMessageParam) => void
    /** Called when the model emits a function/tool call. */
    onToolCall: (tool: ChatCompletionMessage.FunctionCall) => void,
    /** Called with the serialized result of a function call. */
    onFunctionCallResult: (content: string) => void,
    /** Called with each completed chat completion object. */
    onChatCompletion: (completion: ChatCompletion) => void,
    /** Called with text content produced by the model. */
    onContent: (content:string) => void,
    // internal
    /** Called after an internal tool ran, with its category, name, args and result. */
    onTool: (category: string, name: string, args: any, result?: any) => void
    /** Pre-execution tool hook. */
    onToolBefore: onToolBefore
    /** Post-execution tool hook. */
    onToolAfter: onToolAfter
}
/** Runtime task context: user-facing options plus the API client and event collector. */
export interface IKBotTask extends IKBotOptions
{
    /** Configured OpenAI (or OpenAI-compatible) API client. */
    client: OpenAI
    /** Receives message/tool lifecycle events for this task. */
    collector: ICollector
}

View File

@ -0,0 +1,353 @@
/** Options accepted by the kbot CLI and library entry points. */
export interface IKBotOptions {
    /** Target directory */
    path?: string;
    /** The prompt. Supports file paths and environment variables. */
    prompt?: string;
    /** Optional output path for modified files (Tool mode only) */
    output?: string | undefined;
    /** Optional destination path for the result, will substitute ${MODEL_NAME} and ${ROUTER} in the path. Optional, used for "completion" mode */
    dst?: string | undefined;
    /** Iterate over items, supported: GLOB | Path to JSON File | array of strings (comma separated). To test different models, use --each="gpt-3.5-turbo,gpt-4o", the actual string will be exposed as variable `ITEM`, eg: --dst="${ITEM}-output.md" */
    each?: string | undefined;
    /** Disable tools categories, eg: --disable=fs,git,interact,terminal,search,web,email,user */
    disable?: string[];
    /** List of specific tools to disable */
    disableTools?: string[];
    /** List of tools to use. Can be built-in tool names or paths to custom tool files. Default: fs,git,interact,terminal,search,web,email,user */
    tools?: (string[] | string);
    /** Comma separated glob patterns or paths, eg --include=src/*.tsx,src/*.ts --include=package.json */
    include?: string[] | undefined;
    /** Explicit API key to use */
    api_key?: string | undefined;
    /** AI model to use for processing. Available models:

     OpenRouter models:

    01-ai/yi-large | paid
    aetherwiing/mn-starcannon-12b | paid
    ai21/jamba-1-5-large | paid
    ai21/jamba-1-5-mini | paid
    ai21/jamba-instruct | paid
    aion-labs/aion-1.0 | paid
    aion-labs/aion-1.0-mini | paid
    aion-labs/aion-rp-llama-3.1-8b | paid
    amazon/nova-lite-v1 | paid
    amazon/nova-micro-v1 | paid
    amazon/nova-pro-v1 | paid
    anthropic/claude-3-haiku | paid
    anthropic/claude-3-haiku:beta | paid
    anthropic/claude-3-opus | paid
    anthropic/claude-3-opus:beta | paid
    anthropic/claude-3-sonnet | paid
    anthropic/claude-3-sonnet:beta | paid
    anthropic/claude-3.5-haiku | paid
    anthropic/claude-3.5-haiku-20241022 | paid
    anthropic/claude-3.5-haiku-20241022:beta | paid
    anthropic/claude-3.5-haiku:beta | paid
    anthropic/claude-3.5-sonnet | paid
    anthropic/claude-3.5-sonnet-20240620 | paid
    anthropic/claude-3.5-sonnet-20240620:beta | paid
    anthropic/claude-3.5-sonnet:beta | paid
    anthropic/claude-2 | paid
    anthropic/claude-2:beta | paid
    anthropic/claude-2.0 | paid
    anthropic/claude-2.0:beta | paid
    anthropic/claude-2.1 | paid
    anthropic/claude-2.1:beta | paid
    openrouter/auto | paid
    cohere/command | paid
    cohere/command-r | paid
    cohere/command-r-03-2024 | paid
    cohere/command-r-08-2024 | paid
    cohere/command-r-plus | paid
    cohere/command-r-plus-04-2024 | paid
    cohere/command-r-plus-08-2024 | paid
    cohere/command-r7b-12-2024 | paid
    databricks/dbrx-instruct | paid
    deepseek/deepseek-chat-v2.5 | paid
    deepseek/deepseek-chat | paid
    deepseek/deepseek-chat:free | free
    deepseek/deepseek-r1 | paid
    deepseek/deepseek-r1:free | free
    deepseek/deepseek-r1-distill-llama-70b | paid
    deepseek/deepseek-r1-distill-llama-70b:free | free
    deepseek/deepseek-r1-distill-qwen-1.5b | paid
    deepseek/deepseek-r1-distill-qwen-14b | paid
    deepseek/deepseek-r1-distill-qwen-32b | paid
    cognitivecomputations/dolphin-mixtral-8x7b | paid
    cognitivecomputations/dolphin3.0-mistral-24b:free | free
    cognitivecomputations/dolphin3.0-r1-mistral-24b:free | free
    eva-unit-01/eva-llama-3.33-70b | paid
    eva-unit-01/eva-qwen-2.5-32b | paid
    eva-unit-01/eva-qwen-2.5-72b | paid
    sao10k/fimbulvetr-11b-v2 | paid
    alpindale/goliath-120b | paid
    google/gemini-2.0-flash-thinking-exp-1219:free | free
    google/gemini-2.0-flash-thinking-exp:free | free
    google/gemini-exp-1206:free | free
    google/gemini-flash-1.5 | paid
    google/gemini-flash-1.5-8b | paid
    google/gemini-flash-1.5-8b-exp | paid
    google/gemini-2.0-flash-001 | paid
    google/gemini-2.0-flash-exp:free | free
    google/gemini-2.0-flash-lite-preview-02-05:free | free
    google/gemini-pro | paid
    google/gemini-pro-1.5 | paid
    google/gemini-2.0-pro-exp-02-05:free | free
    google/gemini-pro-vision | paid
    google/gemma-2-27b-it | paid
    google/gemma-2-9b-it | paid
    google/gemma-2-9b-it:free | free
    google/gemma-7b-it | paid
    google/learnlm-1.5-pro-experimental:free | free
    google/palm-2-chat-bison | paid
    google/palm-2-chat-bison-32k | paid
    google/palm-2-codechat-bison | paid
    google/palm-2-codechat-bison-32k | paid
    huggingfaceh4/zephyr-7b-beta:free | free
    infermatic/mn-inferor-12b | paid
    inflection/inflection-3-pi | paid
    inflection/inflection-3-productivity | paid
    liquid/lfm-3b | paid
    liquid/lfm-40b | paid
    liquid/lfm-7b | paid
    allenai/llama-3.1-tulu-3-405b | paid
    meta-llama/llama-guard-3-8b | paid
    alpindale/magnum-72b | paid
    anthracite-org/magnum-v2-72b | paid
    anthracite-org/magnum-v4-72b | paid
    mancer/weaver | paid
    meta-llama/llama-2-13b-chat | paid
    meta-llama/llama-2-70b-chat | paid
    meta-llama/llama-3-70b-instruct | paid
    meta-llama/llama-3-8b-instruct | paid
    meta-llama/llama-3-8b-instruct:free | free
    meta-llama/llama-3.1-405b | paid
    meta-llama/llama-3.1-405b-instruct | paid
    meta-llama/llama-3.1-70b-instruct | paid
    meta-llama/llama-3.1-8b-instruct | paid
    meta-llama/llama-3.2-11b-vision-instruct | paid
    meta-llama/llama-3.2-11b-vision-instruct:free | free
    meta-llama/llama-3.2-1b-instruct | paid
    meta-llama/llama-3.2-3b-instruct | paid
    meta-llama/llama-3.2-90b-vision-instruct | paid
    meta-llama/llama-3.3-70b-instruct | paid
    meta-llama/llama-3.3-70b-instruct:free | free
    meta-llama/llama-guard-2-8b | paid
    microsoft/phi-4 | paid
    microsoft/phi-3-medium-128k-instruct | paid
    microsoft/phi-3-medium-128k-instruct:free | free
    microsoft/phi-3-mini-128k-instruct | paid
    microsoft/phi-3-mini-128k-instruct:free | free
    microsoft/phi-3.5-mini-128k-instruct | paid
    minimax/minimax-01 | paid
    mistralai/mistral-large | paid
    mistralai/mistral-large-2407 | paid
    mistralai/mistral-large-2411 | paid
    mistralai/mistral-medium | paid
    nothingiisreal/mn-celeste-12b | paid
    mistralai/mistral-small | paid
    mistralai/mistral-tiny | paid
    mistralai/codestral-2501 | paid
    mistralai/codestral-mamba | paid
    mistralai/ministral-3b | paid
    mistralai/ministral-8b | paid
    mistralai/mistral-7b-instruct | paid
    mistralai/mistral-7b-instruct:free | free
    mistralai/mistral-7b-instruct-v0.1 | paid
    mistralai/mistral-7b-instruct-v0.3 | paid
    mistralai/mistral-nemo | paid
    mistralai/mistral-nemo:free | free
    mistralai/mistral-small-24b-instruct-2501 | paid
    mistralai/mistral-small-24b-instruct-2501:free | free
    mistralai/mixtral-8x22b-instruct | paid
    mistralai/mixtral-8x7b | paid
    mistralai/mixtral-8x7b-instruct | paid
    mistralai/pixtral-12b | paid
    mistralai/pixtral-large-2411 | paid
    mistralai/mistral-saba | paid
    gryphe/mythomax-l2-13b | paid
    gryphe/mythomax-l2-13b:free | free
    neversleep/llama-3-lumimaid-70b | paid
    neversleep/llama-3-lumimaid-8b | paid
    neversleep/llama-3-lumimaid-8b:extended | paid
    neversleep/llama-3.1-lumimaid-70b | paid
    neversleep/llama-3.1-lumimaid-8b | paid
    neversleep/noromaid-20b | paid
    nousresearch/nous-hermes-llama2-13b | paid
    nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid
    nousresearch/hermes-3-llama-3.1-405b | paid
    nousresearch/hermes-3-llama-3.1-70b | paid
    nousresearch/hermes-2-pro-llama-3-8b | paid
    nvidia/llama-3.1-nemotron-70b-instruct | paid
    nvidia/llama-3.1-nemotron-70b-instruct:free | free
    openai/chatgpt-4o-latest | paid
    openai/gpt-3.5-turbo | paid
    openai/gpt-3.5-turbo-0613 | paid
    openai/gpt-3.5-turbo-16k | paid
    openai/gpt-3.5-turbo-0125 | paid
    openai/gpt-3.5-turbo-1106 | paid
    openai/gpt-3.5-turbo-instruct | paid
    openai/gpt-4 | paid
    openai/gpt-4-0314 | paid
    openai/gpt-4-32k | paid
    openai/gpt-4-32k-0314 | paid
    openai/gpt-4-turbo | paid
    openai/gpt-4-1106-preview | paid
    openai/gpt-4-turbo-preview | paid
    openai/gpt-4o | paid
    openai/gpt-4o-2024-05-13 | paid
    openai/gpt-4o-2024-08-06 | paid
    openai/gpt-4o-2024-11-20 | paid
    openai/gpt-4o:extended | paid
    openai/gpt-4o-mini | paid
    openai/gpt-4o-mini-2024-07-18 | paid
    openai/o1 | paid
    openai/o1-mini | paid
    openai/o1-mini-2024-09-12 | paid
    openai/o1-preview | paid
    openai/o1-preview-2024-09-12 | paid
    openai/o3-mini | paid
    openai/o3-mini-high | paid
    openchat/openchat-7b | paid
    openchat/openchat-7b:free | free
    perplexity/llama-3.1-sonar-huge-128k-online | paid
    perplexity/llama-3.1-sonar-large-128k-chat | paid
    perplexity/llama-3.1-sonar-large-128k-online | paid
    perplexity/llama-3.1-sonar-small-128k-chat | paid
    perplexity/llama-3.1-sonar-small-128k-online | paid
    perplexity/sonar | paid
    perplexity/sonar-reasoning | paid
    pygmalionai/mythalion-13b | paid
    qwen/qwen-2-72b-instruct | paid
    qwen/qvq-72b-preview | paid
    qwen/qwen-vl-plus:free | free
    qwen/qwen-max | paid
    qwen/qwen-plus | paid
    qwen/qwen-turbo | paid
    qwen/qwen2.5-vl-72b-instruct:free | free
    qwen/qwq-32b-preview | paid
    qwen/qwen-2-vl-72b-instruct | paid
    qwen/qwen-2-vl-7b-instruct | paid
    qwen/qwen-2.5-72b-instruct | paid
    qwen/qwen-2.5-7b-instruct | paid
    qwen/qwen-2.5-coder-32b-instruct | paid
    undi95/remm-slerp-l2-13b | paid
    thedrummer/rocinante-12b | paid
    sophosympatheia/rogue-rose-103b-v0.2:free | free
    sao10k/l3-lunaris-8b | paid
    sao10k/l3-euryale-70b | paid
    sao10k/l3.1-70b-hanami-x1 | paid
    sao10k/l3.1-euryale-70b | paid
    sao10k/l3.3-euryale-70b | paid
    raifle/sorcererlm-8x22b | paid
    undi95/toppy-m-7b | paid
    undi95/toppy-m-7b:free | free
    thedrummer/unslopnemo-12b | paid
    microsoft/wizardlm-2-7b | paid
    microsoft/wizardlm-2-8x22b | paid
    x-ai/grok-2-1212 | paid
    x-ai/grok-2-vision-1212 | paid
    x-ai/grok-beta | paid
    x-ai/grok-vision-beta | paid
    xwin-lm/xwin-lm-70b | paid

     OpenAI models:

    babbage-002
    chatgpt-4o-latest
    dall-e-2
    dall-e-3
    davinci-002
    gpt-3.5-turbo
    gpt-3.5-turbo-0125
    gpt-3.5-turbo-1106
    gpt-3.5-turbo-16k
    gpt-3.5-turbo-instruct
    gpt-3.5-turbo-instruct-0914
    gpt-4
    gpt-4-0125-preview
    gpt-4-0613
    gpt-4-1106-preview
    gpt-4-turbo
    gpt-4-turbo-2024-04-09
    gpt-4-turbo-preview
    gpt-4o
    gpt-4o-2024-05-13
    gpt-4o-2024-08-06
    gpt-4o-2024-11-20
    gpt-4o-audio-preview
    gpt-4o-audio-preview-2024-10-01
    gpt-4o-audio-preview-2024-12-17
    gpt-4o-mini
    gpt-4o-mini-2024-07-18
    gpt-4o-mini-audio-preview
    gpt-4o-mini-audio-preview-2024-12-17
    gpt-4o-mini-realtime-preview
    gpt-4o-mini-realtime-preview-2024-12-17
    gpt-4o-realtime-preview
    gpt-4o-realtime-preview-2024-10-01
    gpt-4o-realtime-preview-2024-12-17
    o1-mini
    o1-mini-2024-09-12
    o1-preview
    o1-preview-2024-09-12
    omni-moderation-2024-09-26
    omni-moderation-latest
    text-embedding-3-large
    text-embedding-3-small
    text-embedding-ada-002
    tts-1
    tts-1-1106
    tts-1-hd
    tts-1-hd-1106
    whisper-1
    -----

     Deepseek models:

    deepseek-chat
    deepseek-reasoner
    -----
    */
    model?: string | undefined;
    /** Router to use: openai, openrouter or deepseek */
    router?: string;
    /** Chat completion mode:
    completion, tools, assistant.
    completion: no support for tools, please use --dst parameter to save the output.
    tools: allows for tools to be used, eg 'save to ./output.md'. Not all models support this mode.
    assistant: allows documents (PDF, DOCX, ...) to be added but doesn't support tools. Use --dst to save the output.
    custom: custom mode
    */
    mode?: "completion" | "tools" | "assistant" | "custom";
    /** Logging level for the application */
    logLevel?: number;
    /** Path to profile for variables. Supports environment variables. */
    profile?: string | undefined;
    /** Base URL for the API, set via --router or directly */
    baseURL?: string | undefined;
    /** Path to JSON configuration file (API keys). Supports environment variables. */
    config?: string | undefined;
    /** Create a script */
    dump?: string | undefined;
    /** Path to preferences file, eg: location, your email address, gender, etc. Supports environment variables. */
    preferences?: string;
    /** Logging directory */
    logs?: string;
    /** Environment (in profile) */
    env?: string;
    /** Additional template variables exposed to the prompt/paths. */
    variables?: {
        [x: string]: string;
    };
    /** List of filters to apply to the output.
    Used only in completion mode and a given output file specified with --dst.
    It unwraps by default any code or data in Markdown.
    Choices:
    JSON,JSONUnescape,JSONPretty,AlphaSort,code,JSONParse,trim
    */
    filters?: (string | ("JSON" | "JSONUnescape" | "JSONPretty" | "AlphaSort" | "code" | "JSONParse" | "trim")[] | string[] | ((...args_0: unknown[]) => unknown)[]);
    /** Dry run - only write out parameters without making API calls */
    dry?: (boolean | string);
}

View File

@ -0,0 +1,75 @@
import { z } from 'zod';
// Zod schemas describing the option shapes of the CLI commands and tools.
// The `.describe()` names map to the corresponding TypeScript interfaces.
/** Schema for listing files in a directory */
export const FileListingOptionsSchema = z.object({
    directory: z.string().describe('Directory path to list files from'),
    pattern: z.string().optional().describe('Glob pattern for filtering files')
}).describe('IFileListingOptions')
/** Schema for file removal operations */
export const FileRemovalOptionsSchema = z.object({
    path: z.string().describe('Path of the file to remove')
}).describe('IFileRemovalOptions');
/** Schema for git commit operations */
export const GitCommitSchema = z.object({
    files: z.array(z.string()).describe('Files to commit'),
    message: z.string().describe('Commit message')
}).describe('IGitCommitOptions');
/** Schema for git revert operations */
export const GitRevertSchema = z.object({
    files: z.array(z.string()).describe('Files to revert')
}).describe('IGitRevertOptions');
/** Schema for git version switch operations */
export const GitSwitchVersionSchema = z.object({
    branch: z.string().describe('Branch name to switch to'),
    remote: z.string().default('origin').describe('Remote name')
}).describe('IGitSwitchVersionOptions');
/** Schema for git raw file retrieval */
export const GitRawFileSchema = z.object({
    url: z.string().optional().describe('Full GitHub raw URL'),
    repo: z.string().optional().describe('Repository in format owner/repo'),
    path: z.string().optional().describe('File path within repository')
}).refine(
    // Either a full raw URL, or a repo+path pair — both shapes are accepted.
    data => (data.url) || (data.repo && data.path),
    'Either url or both repo and path must be provided'
).describe('IGitRawFileOptions');
/** Schema for npm run command */
export const NpmRunSchema = z.object({
    command: z.string().describe('Command to run (e.g. install, test, etc)'),
    args: z.array(z.string()).optional().describe('Additional arguments for the command')
}).describe('INpmRunOptions');
/** Schema for terminal command execution */
export const TerminalCommandSchema = z.object({
    command: z.string().describe('Command to execute'),
    args: z.array(z.string()).optional().describe('Command arguments'),
    cwd: z.string().optional().describe('Working directory for command execution'),
    background: z.boolean().optional().describe('Run command in background (non-blocking)'),
    window: z.boolean().optional().describe('Open command in new terminal window'),
    detached: z.boolean().optional().describe('Run process detached from parent')
}).describe('ITerminalCommandOptions');
/** Schema for tool invocation parameters */
export const InvokeToolSchema = z.object({
    tools: z.string().describe('Tool category to use (fs, npm, git, terminal)'),
    function: z.string().describe('Function name to invoke'),
    // NOTE: default is captured at module load time, not per invocation.
    target: z.string().default(process.cwd()).describe('Target directory'),
    params: z.string().optional().describe('JSON string of parameters'),
    output: z.string().optional().describe('Path to write the output to'),
    env_key: z.string().optional().describe('Environment configuration key')
}).describe('IInvokeToolOptions');
/** Schema for list command options */
export const ListCommandSchema = z.object({
    output: z.string().default("./llm-tools.json").describe('Output file path for tools list')
}).describe('IListCommandOptions');
/** Schema for tool listing options */
export const ToolListingOptionsSchema = z.object({
    output: z.string().default('./llm-tools.json').describe('Path to write the output to')
}).describe('IToolListingOptions');

View File

@ -0,0 +1,353 @@
export interface IKBotOptions {
/** Target directory */
path?: string;
/** The prompt. Supports file paths and environment variables. */
prompt?: string;
/** Optional output path for modified files (Tool mode only) */
output?: string | undefined;
/** Optional destination path for the result, will substitute ${MODEL_NAME} and ${ROUTER} in the path. Optional, used for "completion" mode */
dst?: string | undefined;
/** Iterate over items, supported: GLOB | Path to JSON File | array of strings (comma separated). To test different models, use --each="gpt-3.5-turbo,gpt-4o"; the actual string will be exposed as the variable `ITEM`, eg: --dst="${ITEM}-output.md" */
each?: string | undefined;
/** Disable tools categories, eg: --disable=fs,git,interact,terminal,search,web,email,user */
disable?: string[];
/** List of specific tools to disable */
disableTools?: string[];
/** List of tools to use. Can be built-in tool names or paths to custom tool files. Default: fs,git,interact,terminal,search,web,email,user */
tools?: (string[] | string);
/** Comma separated glob patterns or paths, eg --include=src/*.tsx,src/*.ts --include=package.json */
include?: string[] | undefined;
/** Explicit API key to use */
api_key?: string | undefined;
/** AI model to use for processing. Available models:

 OpenRouter models:

01-ai/yi-large | paid
aetherwiing/mn-starcannon-12b | paid
ai21/jamba-1-5-large | paid
ai21/jamba-1-5-mini | paid
ai21/jamba-instruct | paid
aion-labs/aion-1.0 | paid
aion-labs/aion-1.0-mini | paid
aion-labs/aion-rp-llama-3.1-8b | paid
jondurbin/airoboros-l2-70b | paid
amazon/nova-lite-v1 | paid
amazon/nova-micro-v1 | paid
amazon/nova-pro-v1 | paid
anthropic/claude-3-haiku | paid
anthropic/claude-3-haiku:beta | paid
anthropic/claude-3-opus | paid
anthropic/claude-3-opus:beta | paid
anthropic/claude-3-sonnet | paid
anthropic/claude-3-sonnet:beta | paid
anthropic/claude-3.5-haiku | paid
anthropic/claude-3.5-haiku-20241022 | paid
anthropic/claude-3.5-haiku-20241022:beta | paid
anthropic/claude-3.5-haiku:beta | paid
anthropic/claude-3.5-sonnet | paid
anthropic/claude-3.5-sonnet-20240620 | paid
anthropic/claude-3.5-sonnet-20240620:beta | paid
anthropic/claude-3.5-sonnet:beta | paid
anthropic/claude-2 | paid
anthropic/claude-2:beta | paid
anthropic/claude-2.0 | paid
anthropic/claude-2.0:beta | paid
anthropic/claude-2.1 | paid
anthropic/claude-2.1:beta | paid
openrouter/auto | paid
cohere/command | paid
cohere/command-r | paid
cohere/command-r-03-2024 | paid
cohere/command-r-08-2024 | paid
cohere/command-r-plus | paid
cohere/command-r-plus-04-2024 | paid
cohere/command-r-plus-08-2024 | paid
cohere/command-r7b-12-2024 | paid
databricks/dbrx-instruct | paid
deepseek/deepseek-chat-v2.5 | paid
deepseek/deepseek-chat | paid
deepseek/deepseek-chat:free | free
deepseek/deepseek-r1 | paid
deepseek/deepseek-r1:free | free
deepseek/deepseek-r1-distill-llama-70b | paid
deepseek/deepseek-r1-distill-llama-70b:free | free
deepseek/deepseek-r1-distill-llama-8b | paid
deepseek/deepseek-r1-distill-qwen-1.5b | paid
deepseek/deepseek-r1-distill-qwen-14b | paid
deepseek/deepseek-r1-distill-qwen-32b | paid
cognitivecomputations/dolphin-mixtral-8x7b | paid
cognitivecomputations/dolphin-mixtral-8x22b | paid
eva-unit-01/eva-llama-3.33-70b | paid
eva-unit-01/eva-qwen-2.5-32b | paid
eva-unit-01/eva-qwen-2.5-72b | paid
sao10k/fimbulvetr-11b-v2 | paid
alpindale/goliath-120b | paid
google/gemini-2.0-flash-thinking-exp-1219:free | free
google/gemini-2.0-flash-thinking-exp:free | free
google/gemini-exp-1206:free | free
google/gemini-flash-1.5 | paid
google/gemini-flash-1.5-8b | paid
google/gemini-flash-1.5-8b-exp | paid
google/gemini-2.0-flash-001 | paid
google/gemini-2.0-flash-exp:free | free
google/gemini-2.0-flash-lite-preview-02-05:free | free
google/gemini-pro | paid
google/gemini-pro-1.5 | paid
google/gemini-2.0-pro-exp-02-05:free | free
google/gemini-pro-vision | paid
google/gemma-2-27b-it | paid
google/gemma-2-9b-it | paid
google/gemma-2-9b-it:free | free
google/gemma-7b-it | paid
google/learnlm-1.5-pro-experimental:free | free
google/palm-2-chat-bison | paid
google/palm-2-chat-bison-32k | paid
google/palm-2-codechat-bison | paid
google/palm-2-codechat-bison-32k | paid
huggingfaceh4/zephyr-7b-beta:free | free
infermatic/mn-inferor-12b | paid
inflection/inflection-3-pi | paid
inflection/inflection-3-productivity | paid
liquid/lfm-3b | paid
liquid/lfm-40b | paid
liquid/lfm-7b | paid
allenai/llama-3.1-tulu-3-405b | paid
alpindale/magnum-72b | paid
anthracite-org/magnum-v2-72b | paid
anthracite-org/magnum-v4-72b | paid
mancer/weaver | paid
meta-llama/llama-2-13b-chat | paid
meta-llama/llama-2-70b-chat | paid
meta-llama/llama-3-70b-instruct | paid
meta-llama/llama-3-8b-instruct | paid
meta-llama/llama-3-8b-instruct:free | free
meta-llama/llama-3.1-405b | paid
meta-llama/llama-3.1-405b-instruct | paid
meta-llama/llama-3.1-70b-instruct | paid
meta-llama/llama-3.1-8b-instruct | paid
meta-llama/llama-3.2-11b-vision-instruct | paid
meta-llama/llama-3.2-11b-vision-instruct:free | free
meta-llama/llama-3.2-1b-instruct | paid
meta-llama/llama-3.2-3b-instruct | paid
meta-llama/llama-3.2-90b-vision-instruct | paid
meta-llama/llama-3.3-70b-instruct | paid
meta-llama/llama-3.3-70b-instruct:free | free
meta-llama/llama-guard-2-8b | paid
microsoft/phi-4 | paid
microsoft/phi-3-medium-128k-instruct | paid
microsoft/phi-3-medium-128k-instruct:free | free
microsoft/phi-3-mini-128k-instruct | paid
microsoft/phi-3-mini-128k-instruct:free | free
microsoft/phi-3.5-mini-128k-instruct | paid
sophosympatheia/midnight-rose-70b | paid
minimax/minimax-01 | paid
mistralai/mistral-large | paid
mistralai/mistral-large-2407 | paid
mistralai/mistral-large-2411 | paid
mistralai/mistral-medium | paid
nothingiisreal/mn-celeste-12b | paid
mistralai/mistral-small | paid
mistralai/mistral-tiny | paid
mistralai/codestral-2501 | paid
mistralai/codestral-mamba | paid
mistralai/ministral-3b | paid
mistralai/ministral-8b | paid
mistralai/mistral-7b-instruct | paid
mistralai/mistral-7b-instruct:free | free
mistralai/mistral-7b-instruct-v0.1 | paid
mistralai/mistral-7b-instruct-v0.3 | paid
mistralai/mistral-nemo | paid
mistralai/mistral-small-24b-instruct-2501 | paid
mistralai/mixtral-8x22b-instruct | paid
mistralai/mixtral-8x7b | paid
mistralai/mixtral-8x7b-instruct | paid
mistralai/pixtral-12b | paid
mistralai/pixtral-large-2411 | paid
gryphe/mythomax-l2-13b | paid
gryphe/mythomax-l2-13b:free | free
neversleep/llama-3-lumimaid-70b | paid
neversleep/llama-3-lumimaid-8b | paid
neversleep/llama-3-lumimaid-8b:extended | paid
neversleep/llama-3.1-lumimaid-70b | paid
neversleep/llama-3.1-lumimaid-8b | paid
neversleep/noromaid-20b | paid
nousresearch/nous-hermes-llama2-13b | paid
nousresearch/nous-hermes-2-mixtral-8x7b-dpo | paid
nousresearch/hermes-3-llama-3.1-405b | paid
nousresearch/hermes-3-llama-3.1-70b | paid
nousresearch/hermes-2-pro-llama-3-8b | paid
nvidia/llama-3.1-nemotron-70b-instruct | paid
nvidia/llama-3.1-nemotron-70b-instruct:free | free
openai/chatgpt-4o-latest | paid
openai/gpt-3.5-turbo | paid
openai/gpt-3.5-turbo-0613 | paid
openai/gpt-3.5-turbo-16k | paid
openai/gpt-3.5-turbo-0125 | paid
openai/gpt-3.5-turbo-1106 | paid
openai/gpt-3.5-turbo-instruct | paid
openai/gpt-4 | paid
openai/gpt-4-0314 | paid
openai/gpt-4-32k | paid
openai/gpt-4-32k-0314 | paid
openai/gpt-4-turbo | paid
openai/gpt-4-1106-preview | paid
openai/gpt-4-turbo-preview | paid
openai/gpt-4o | paid
openai/gpt-4o-2024-05-13 | paid
openai/gpt-4o-2024-08-06 | paid
openai/gpt-4o-2024-11-20 | paid
openai/gpt-4o:extended | paid
openai/gpt-4o-mini | paid
openai/gpt-4o-mini-2024-07-18 | paid
openai/o1 | paid
openai/o1-mini | paid
openai/o1-mini-2024-09-12 | paid
openai/o1-preview | paid
openai/o1-preview-2024-09-12 | paid
openai/o3-mini | paid
openchat/openchat-7b | paid
openchat/openchat-7b:free | free
teknium/openhermes-2.5-mistral-7b | paid
perplexity/llama-3.1-sonar-huge-128k-online | paid
perplexity/llama-3.1-sonar-large-128k-chat | paid
perplexity/llama-3.1-sonar-large-128k-online | paid
perplexity/llama-3.1-sonar-small-128k-chat | paid
perplexity/llama-3.1-sonar-small-128k-online | paid
perplexity/sonar | paid
perplexity/sonar-reasoning | paid
pygmalionai/mythalion-13b | paid
qwen/qwen-2-72b-instruct | paid
qwen/qwen-2-7b-instruct | paid
qwen/qwen-2-7b-instruct:free | free
qwen/qvq-72b-preview | paid
qwen/qwen-vl-plus:free | free
qwen/qwen-max | paid
qwen/qwen-plus | paid
qwen/qwen-turbo | paid
qwen/qwen2.5-vl-72b-instruct:free | free
qwen/qwq-32b-preview | paid
qwen/qwen-2-vl-72b-instruct | paid
qwen/qwen-2-vl-7b-instruct | paid
qwen/qwen-2.5-72b-instruct | paid
qwen/qwen-2.5-7b-instruct | paid
qwen/qwen-2.5-coder-32b-instruct | paid
undi95/remm-slerp-l2-13b | paid
thedrummer/rocinante-12b | paid
sophosympatheia/rogue-rose-103b-v0.2:free | free
sao10k/l3-lunaris-8b | paid
sao10k/l3-euryale-70b | paid
sao10k/l3.1-70b-hanami-x1 | paid
sao10k/l3.1-euryale-70b | paid
sao10k/l3.3-euryale-70b | paid
raifle/sorcererlm-8x22b | paid
undi95/toppy-m-7b | paid
undi95/toppy-m-7b:free | free
thedrummer/unslopnemo-12b | paid
microsoft/wizardlm-2-7b | paid
microsoft/wizardlm-2-8x22b | paid
x-ai/grok-2-1212 | paid
x-ai/grok-2-vision-1212 | paid
x-ai/grok-beta | paid
x-ai/grok-vision-beta | paid
xwin-lm/xwin-lm-70b | paid

 OpenAI models:

babbage-002
chatgpt-4o-latest
dall-e-2
dall-e-3
davinci-002
gpt-3.5-turbo
gpt-3.5-turbo-0125
gpt-3.5-turbo-1106
gpt-3.5-turbo-16k
gpt-3.5-turbo-instruct
gpt-3.5-turbo-instruct-0914
gpt-4
gpt-4-0125-preview
gpt-4-0613
gpt-4-1106-preview
gpt-4-turbo
gpt-4-turbo-2024-04-09
gpt-4-turbo-preview
gpt-4o
gpt-4o-2024-05-13
gpt-4o-2024-08-06
gpt-4o-2024-11-20
gpt-4o-audio-preview
gpt-4o-audio-preview-2024-10-01
gpt-4o-audio-preview-2024-12-17
gpt-4o-mini
gpt-4o-mini-2024-07-18
gpt-4o-mini-audio-preview
gpt-4o-mini-audio-preview-2024-12-17
gpt-4o-mini-realtime-preview
gpt-4o-mini-realtime-preview-2024-12-17
gpt-4o-realtime-preview
gpt-4o-realtime-preview-2024-10-01
gpt-4o-realtime-preview-2024-12-17
o1-mini
o1-mini-2024-09-12
o1-preview
o1-preview-2024-09-12
omni-moderation-2024-09-26
omni-moderation-latest
text-embedding-3-large
text-embedding-3-small
text-embedding-ada-002
tts-1
tts-1-1106
tts-1-hd
tts-1-hd-1106
whisper-1
-----

 Deepseek models:

deepseek-chat
deepseek-reasoner
-----
*/
model?: string | undefined;
/** Router to use: openai, openrouter or deepseek */
router?: string;
/** Chat completion mode:
completion, tools, assistant.
completion: no support for tools, please use --dst parameter to save the output.
tools: allows for tools to be used, eg 'save to ./output.md'. Not all models support this mode.
assistant: allows documents (PDF, DOCX, ...) to be added but doesn't support tools. Use --dst to save the output. Supported files:
custom: custom mode
*/
mode?: "completion" | "tools" | "assistant" | "custom";
/** Logging level for the application */
logLevel?: number;
/** Path to profile for variables. Supports environment variables. */
profile?: string | undefined;
/** Base URL for the API, set via --router or directly */
baseURL?: string | undefined;
/** Path to JSON configuration file (API keys). Supports environment variables. */
config?: string | undefined;
/** Create a script */
dump?: string | undefined;
/** Path to preferences file, eg: location, your email address, gender, etc. Supports environment variables. */
preferences?: string;
/** Logging directory */
logs?: string;
/** Environment (in profile) */
env?: string;
variables?: {
[x: string]: string;
};
/** List of filters to apply to the output.
Used only in completion mode with an output file specified via --dst.
It unwraps by default any code or data in Markdown.
Choices:
JSON,JSONUnescape,JSONPretty,AlphaSort,code,JSONParse,trim
*/
filters?: (string | ("JSON" | "JSONUnescape" | "JSONPretty" | "AlphaSort" | "code" | "JSONParse" | "trim")[] | string[] | ((...args_0: unknown[]) => unknown)[]);
/** Dry run - only write out parameters without making API calls */
dry?: (boolean | string);
}

View File

@ -0,0 +1,5 @@
{
"prompt_tokens": 14973,
"completion_tokens": 161,
"total_tokens": 15134
}

View File

@ -0,0 +1,27 @@
I've created a JSON file that outlines the tool calls needed for setting up a TypeScript boilerplate project. The file follows the sequence you specified and includes:
1. `modify_project_files` - Creates all necessary configuration and source files:
- package.json
- tsconfig.json
- .eslintrc
- src/index.ts
- README.md
2. `install_dependency` - Installs essential TypeScript dependencies:
- typescript
- @types/node
- eslint
- @typescript-eslint/parser
- @typescript-eslint/eslint-plugin
3. `execute_command` - Runs the TypeScript initialization command
4. Git setup:
- `init_repository` - Initializes the git repository
- `commit_files_git` - Commits all created files
Each tool call includes comments explaining its purpose, and the file contents are truncated with {...} for brevity. The JSON structure allows for easy parsing and execution of these steps in sequence.
Would you like me to expand on any particular aspect of this setup or modify any of the tool calls?

View File

@ -0,0 +1,3 @@
kbot "to create a Typescript boilerplate, with documentation, what tools you'd use? Write as json : tool_name(params), to ./tests/tool-calls-model.json (add field with comments, truncate file contents)" \
--glob=tools.json.md \
--dst=tests/test-tool-calls.md \

View File

@ -0,0 +1,75 @@
{
"comment": "Tool calls for setting up a TypeScript boilerplate project",
"tool_calls": [
{
"tool": "modify_project_files",
"params": {
"files": [
{
"path": "package.json",
"content": "{...}",
"comment": "Basic package.json with TypeScript configuration"
},
{
"path": "tsconfig.json",
"content": "{...}",
"comment": "TypeScript compiler configuration"
},
{
"path": ".eslintrc",
"content": "{`...}",
"comment": "ESLint configuration for code linting"
},
{
"path": "src/index.ts",
"content": "console.log('Hello TypeScript!');",
"comment": "Entry point file"
},
{
"path": "README.md",
"content": "# TypeScript Boilerplate\n...",
"comment": "Project documentation"
}
]
}
},
{
"tool": "install_dependency",
"params": {
"dependencies": [
"typescript",
"@types/node",
"eslint",
"@typescript-eslint/parser",
"@typescript-eslint/eslint-plugin"
]
},
"comment": "Install essential TypeScript dependencies"
},
{
"tool": "execute_command",
"params": {
"command": "npx tsc --init"
},
"comment": "Initialize TypeScript configuration"
},
{
"tool": "init_repository",
"params": {},
"comment": "Initialize git repository"
},
{
"tool": "commit_files_git",
"params": {
"files": [
"package.json",
"tsconfig.json",
".eslintrc",
"src/index.ts",
"README.md"
],
"message": "Initial commit: TypeScript boilerplate setup"
}
}
]
}

View File

@ -0,0 +1,31 @@
Email
- **send_email**(recipient: string|[string], subject?, body?)
Search
- **google**(query: string)
- **serpapi**(query: string, engine?)
Interact
- **send_email**(recipient: string|[string], subject?, body?)
FS
- **list_files**(directory, pattern?)
- **read_files**(directory, pattern?)
- **remove_file**(path)
- **rename_file**(src, dst)
- **modify_project_files**([{path, content}...])
- **create_file**({path, content})
- **file_exists**({path})
- **read_file**({path})
NPM
- **build_project**()
- **run_npm**(command, args?)
- **install_dependency**([dependencies...])
Git
- **init_repository**()
- **commit_files_git**([files...], message)
Terminal
- **execute_command**(command, args?, cwd?, background?, window?, detached?)

View File

@ -0,0 +1,423 @@
{
"email": [
{
"name": "send_email",
"description": "Sends an email",
"category": "email",
"parameters": {
"type": "object",
"properties": {
"recipient": {
"type": [
"string",
"array"
],
"items": {
"type": "string"
},
"description": "The email address of the recipient(s). Can be a single email or an array of emails. For \"me\", use the default email address"
},
"subject": {
"type": "string",
"description": "the subject",
"optional": true
},
"body": {
"type": "string",
"description": "Markdown formatted body of the email",
"optional": true
}
},
"required": [
"url"
]
}
}
],
"search": [
{
"name": "google",
"description": "Searches Google for the given query",
"category": "search",
"parameters": {
"type": "object",
"properties": {
"query": {
"type": "string"
}
},
"required": [
"query"
]
}
},
{
"name": "serpapi",
"description": "Searches Serpapi (finds locations (engine:google_local), places on the map (engine:google_maps) ) for the given query",
"category": "search",
"parameters": {
"type": "object",
"properties": {
"query": {
"type": "string"
},
"engine": {
"type": "string",
"default": "google"
}
},
"required": [
"query"
]
}
}
],
"interact": [
{
"name": "send_email",
"description": "Sends an email",
"category": "email",
"parameters": {
"type": "object",
"properties": {
"recipient": {
"type": [
"string",
"array"
],
"items": {
"type": "string"
},
"description": "The email address of the recipient(s). Can be a single email or an array of emails. For \"me\", use the default email address"
},
"subject": {
"type": "string",
"description": "the subject",
"optional": true
},
"body": {
"type": "string",
"description": "Markdown formatted body of the email",
"optional": true
}
},
"required": [
"url"
]
}
}
],
"fs": [
{
"name": "list_files",
"description": "List all files in a directory",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"directory": {
"type": "string"
},
"pattern": {
"type": "string",
"optional": true
}
},
"required": [
"directory"
]
}
},
{
"name": "read_files",
"description": "Reads files in a directory with a given pattern",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"directory": {
"type": "string"
},
"pattern": {
"type": "string",
"optional": true
}
},
"required": [
"directory"
]
}
},
{
"name": "remove_file",
"description": "Remove a file at given path",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"path": {
"type": "string"
}
},
"required": [
"path"
]
}
},
{
"name": "rename_file",
"description": "Rename or move a file or directory",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"src": {
"type": "string"
},
"dst": {
"type": "string"
}
},
"required": [
"path"
]
}
},
{
"name": "modify_project_files",
"description": "Modify existing project files",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"files": {
"type": "array",
"items": {
"type": "object",
"properties": {
"path": {
"type": "string"
},
"content": {
"type": "string",
"description": "base64 encoded string"
}
},
"required": [
"path",
"content"
]
}
}
},
"required": [
"files"
]
}
},
{
"name": "create_file",
"description": "Creates a file, given a path and content",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"file": {
"type": "object",
"properties": {
"path": {
"type": "string"
},
"content": {
"type": "string",
"description": "base64 encoded string"
}
}
}
},
"required": [
"file"
]
}
},
{
"name": "file_exists",
"description": "check if a file or folder exists",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"file": {
"type": "object",
"properties": {
"path": {
"type": "string"
}
}
}
},
"required": [
"file"
]
}
},
{
"name": "read_file",
"description": "read a file, at given a path",
"category": "fs",
"parameters": {
"type": "object",
"properties": {
"file": {
"type": "object",
"properties": {
"path": {
"type": "string"
}
}
}
},
"required": [
"file"
]
}
}
],
"npm": [
{
"name": "build_project",
"description": "Build project using pnpm build command",
"category": "npm",
"parameters": {
"type": "object",
"properties": {},
"required": []
}
},
{
"name": "run_npm",
"description": "Run an npm/pnpm command",
"category": "npm",
"parameters": {
"type": "object",
"properties": {
"command": {
"type": "string",
"description": "Command to run (e.g. install, test, etc)"
},
"args": {
"type": "array",
"items": {
"type": "string"
},
"description": "Additional arguments for the command",
"optional": true
}
},
"required": [
"command"
]
}
},
{
"name": "install_dependency",
"description": "Install a dependency using npm",
"category": "npm",
"parameters": {
"type": "object",
"properties": {
"dependencies": {
"type": "array",
"items": {
"type": "string"
}
}
},
"required": [
"dependencies"
]
}
}
],
"git": [
{
"name": "init_repository",
"description": "Initialize a new git repository",
"category": "git",
"parameters": {
"type": "object",
"properties": {},
"required": []
}
},
{
"name": "commit_files_git",
"description": "Commit files using git",
"category": "git",
"parameters": {
"type": "object",
"properties": {
"files": {
"type": "array",
"items": {
"type": "string"
}
},
"message": {
"type": "string"
}
},
"required": [
"files"
]
}
}
],
"terminal": [
{
"name": "execute_command",
"description": "Execute a terminal command and capture output",
"category": "terminal",
"parameters": {
"type": "object",
"properties": {
"command": {
"type": "string",
"description": "Command to execute"
},
"args": {
"type": "array",
"items": {
"type": "string"
},
"description": "Command arguments",
"optional": true
},
"cwd": {
"type": "string",
"description": "Working directory for command execution",
"optional": true
},
"background": {
"type": "boolean",
"description": "Run command in background (non-blocking)",
"optional": true
},
"window": {
"type": "boolean",
"description": "Open command in new terminal window",
"optional": true
},
"detached": {
"type": "boolean",
"description": "Run process detached from parent",
"optional": true
}
},
"required": [
"command"
]
}
}
]
}

View File

@ -0,0 +1,39 @@
## email
- send_email(?recipient, ?subject, ?body): Sends an email
## search
- google(query): Searches Google for the given query
- serpapi(query, ?engine): Searches Serpapi (finds locations (engine:google_local), places on the map (engine:google_maps) ) for the given query
## interact
- send_email(?recipient, ?subject, ?body): Sends an email
## fs
- list_files(directory, ?pattern): List all files in a directory
- read_files(directory, ?pattern): Reads files in a directory with a given pattern
- remove_file(path): Remove a file at given path
- rename_file(?src, ?dst): Rename or move a file or directory
- modify_project_files(files): Modify existing project files
- create_file(file): Creates a file, given a path and content
- file_exists(file): check if a file or folder exists
- read_file(file): read a file, at given a path
## npm
- build_project(): Build project using pnpm build command
- run_npm(command, ?args): Run an npm/pnpm command
- install_dependency(dependencies): Install a dependency using npm
## git
- init_repository(): Initialize a new git repository
- commit_files_git(files, ?message): Commit files using git
## terminal
- execute_command(command, ?args, ?cwd, ?background, ?window, ?detached): Execute a terminal command and capture output
Preferred order:
1. create/modify files, using modify_project_files for multiple files at once
2. install dependencies
3. execute tools
4. create documentation
5. init git, add to git

View File

@ -0,0 +1,30 @@
# Tools Reference
## Email
send_email(recipient, ?subject, ?body): Send markdown emails
## Search
google(query): Search Google
serpapi(query, ?engine): Search for locations/places
## Filesystem
list_files(directory, ?pattern): List files in dir
read_files(directory, ?pattern): Read files in dir
remove_file(path): Delete file
rename_file(src, dst): Move/rename file
modify_project_files(files[]): Modify existing files
create_file(file): Create new file
file_exists(path): Check if file exists
read_file(path): Read file contents
## NPM
build_project(): Run pnpm build
run_npm(command, ?args[]): Run npm/pnpm commands
install_dependency(dependencies[]): Install packages
## Git
init_repository(): Initialize git repo
commit_files_git(files[], ?message): Commit files
## Terminal
execute_command(command, ?args[], ?cwd, ?background, ?window, ?detached): Run shell commands

View File

@ -0,0 +1,24 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"outDir": "./",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"allowJs": true,
"noImplicitAny": false,
"strictNullChecks": false,
"forceConsistentCasingInFileNames": true
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules"
],
"files": [
"src/index.ts"
]
}

View File

@ -0,0 +1,14 @@
node_modules
npm-debug.log
Dockerfile
.dockerignore
.git
.gitignore
README.md
.env
.env.*
*.log
coverage
.kbot
dist
build

View File

@ -0,0 +1 @@
{}

33
packages/kbot/.gitignore vendored Normal file
View File

@ -0,0 +1,33 @@
# Deno
.deno
deno.lock
deno.json._decorators
# Dependencies
node_modules/
# Build output
build/
out/
# Environment files
.env*
!src/.env/
!src/.env/*md
# Generated files
.dts
types/
.DS_Store
.vscode/*
!.vscode/settings.json
# Logs
*.log
*.log.*
docs-internal
systems/code-server-defaults
systems/workspace/kbot-docs
systems/.code-server/code-server-ipc.sock
systems/.code-server/User/workspaceStorage/
systems/code-server-defaults
systems/.code-server

49
packages/kbot/.npmignore Normal file
View File

@ -0,0 +1,49 @@
# Ignore node_modules directory
node_modules/
# Ignore log files
*.log
# Ignore temporary files
*.tmp
# Ignore coverage reports
coverage/
.kbot
docs
docs_
.env
report
.vscode
config
systems
tools.json
commit.json
docker.sh
pnpm-lock.yaml
package-lock.json
scripts
todos.md
tests
tmp
dist/node_modules
dist/data
dist/.kbot
dist/package-lock.json
# Logs
*.Log
*.Log.*
docs
docs-internal
systems/code-server-defaults
kbot-extensions
systems/workspace/kbot-docs
systems/.code-server/code-server-ipc.sock
systems/.code-server/User/workspaceStorage/
systems/code-server-defaults
systems/.code-server
kbot-tests
kbot-extensions

2
packages/kbot/.npmrc Normal file
View File

@ -0,0 +1,2 @@
registry=https://registry.npmjs.org/
save-exact=true

51
packages/kbot/.travis.yml Normal file
View File

@ -0,0 +1,51 @@
# Travis CI Configuration
language: node_js
# Specify the operating systems
os:
- linux
- osx
- windows
# Specify the Node.js versions to test on
node_js:
- "node" # latest stable node
# Specific operating system and Node.js version combinations
matrix:
include:
# Linux (Ubuntu)
- os: linux
dist: jammy
node_js: "node"
# macOS
- os: osx
node_js: "node"
# Windows
- os: windows
node_js: "node"
# Cache node_modules between builds
cache:
directories:
- node_modules
# Install dependencies
install:
- npm ci
# Run tests
script:
- npm test
# Before deployment scripts (optional)
before_deploy:
- npm run build
# Notifications
notifications:
email:
on_success: change
on_failure: always

646
packages/kbot/.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,646 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "create",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"create",
"typescript",
"-p=../ai-tests",
"-n=test-simple",
"--app='Command-1 : Distance by car, using Google, output the result at a given path. Create a file at src/constants.ts with the following content: export const GOOGLE_API_KEY'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "modify",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"../ai-tests/test-simple",
"--prompt='Add another command: create json schemas for functions in src/lib/*, using ts-json-schema-generator'",
"--output='../ai-tests/test-simple-modified'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "git",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}/tests/typescript",
"args": [
"init repository, using git tool",
"--logLevel=1",
"--router=openai",
"--model=gpt-4o",
"--disable='npm,terminal,user,interact,web,search'",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "templates:solidworks",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"../sw-test/",
"--prompt='create for each *.cs file a dedicated documentation (with example code), eg: src_file.md, link them in readme.md'",
"--template=./solidworks",
"--include='*.md,*.cs'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "prompt:docker",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='./systems/prompt-docker.md'",
"--template=typescript",
"--disable=npm",
"--include='systems/**'",
"--dump='./test.sh'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "iterator",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='for each file in src/commands/*.ts, write a report in /reports/commands/file_name.md (skip existing reports using the file_exists tool), detected problems, possible solutions, and a conclusion, and example code'",
//"--template=typescript",
"--disable=npm,terminal",
"--include='src/*.tsx,src/*.ts'",
"--disable=terminal",
"--dump='./test.sh'",
"--include='src/*.ts'",
"--disable=2",
"--include=false"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "search:google",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='search google, osr-plastic extruders; write it as markdown in ./tests/search/google.md'",
"--disable='npm,terminal'",
"--include='./tests/search/google'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "search:serpapi",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='Find all post industrial plastic traders in Dresden Germany, using serpapi (map), write it to ./tests/search/yammi.md'",
"--disable='npm,terminal'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "interact",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='use the interact tool to figure out if a person has psychopathic tendencies, ask at least 3 questions but not direct, store the result in prefs.json, as probability (merge), with reasons why; it is for an LLM study'",
"--template=typescript",
"--disable=npm",
"--include='tests/*.json'",
"--include='tests/*.md'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "web",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"./",
"--prompt='summarize https://community.preciousplastic.com/u/farm-fab-lab-by-plat-institute using the web tool, store the result ( and all found links ) in ./tests/summarize/oa-tests.md'",
"--disable=npm,terminal",
"--include='tests/*.md'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "deepseek",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"summarize https://community.preciousplastic.com/u/farm-fab-lab-by-plat-institute using the web tool, store the result ( and all found links ) in ./tests/summarize/oa-tests.md",
"--disable=npm,terminal,git,fs",
"--router=deepseek",
"--tools=fs"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "tools:email",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'send John the latest weather report for Tarragona, as email'",
"--disable='npm,terminal'",
"--include='tests/*.md'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal", // <= the relevant part
},
{
"type": "node",
"request": "launch",
"name": "tools:terminal:astro",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'create static site with Astro, using terminal tool'",
"--disable='npm,git'",
"--path=./tests/astro-test"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "types",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"types"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "tools:search",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'meaning of life, save to ./tests/search/meaning-of-life.md'",
"--logLevel=1",
"--include=src/commands/*.ts",
"--include=src/zod_schema.ts",
"--disable='npm,terminal,interact,git,search'",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "assistant:code",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'security audit for the given files, refer to code, and file names'",
"--include=src/zod_schema.ts",
"--disable='npm,terminal,interact,git,search'",
"--mode=assistant",
"--router=openai",
"--model=gpt-4o",
"--dst=./tests/assistant/code.md",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "assistant:pdf",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'total price(s) for all stainless parts (304), as markdown table, group by part prefixes'",
"--include=./tests/assistant/invoice.pdf",
"--disable='npm,terminal,interact,git,search'",
"--mode=assistant",
"--router=openai",
"--model=gpt-4o",
"--dst=./tests/assistant/invoice.md",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "assistant:md",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'identify all components (and brand), as markdown table, with location, with links, there are PIDs and a Controllino mini'",
"--include2=./docs_/*.md",
"--include=./tests/images/elzm-cab.jpg",
"--disable='npm,terminal,interact,git,search'",
"--router2=openai",
"--model2=gpt-4o",
"--mode=completion",
"--dst=./tests/assistant/elzm.md",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "each:glob",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"summarize the markdown file as json",
"--logLevel=1",
"--var-test=foo",
"--dry",
"--each=${OSR_ROOT}/products/*.json",
"--dst=./tests/each/${SRC_DIR}/${SRC_NAME}-${MODEL}-${ROUTER}.md",
"--disable='npm,terminal,interact,git,search'",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "each:array",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"summarize all markdown files as json",
"--logLevel=1",
"--var-test=foo",
"--dry",
"--mode=completion",
"--filters=code",
"--logs=./tests/each",
"--preferences=none",
"--each=gpt-3.5-turbo,gpt-4o",
"--router=openai",
"--include=tests/iterator/g*.md",
"--dst=./tests/each/${ITEM}.json",
"--disable='npm,terminal,interact,git,search'",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "salamand",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"summarize all markdown files as json",
"--logLevel=1",
"--var-test=foo",
"--dry",
"--mode=completion",
"--filters=code",
"--logs=./tests/each",
"--preferences=none",
"--each=gpt-3.5-turbo,gpt-4o",
"--router=openai",
"--include=C:\\Users\\zx\\Desktop\\osr\\osr-mono\\packages\\osr-code-bot\\tests\\salamand\\g1.md C:\\Users\\zx\\Desktop\\osr\\osr-mono\\packages\\osr-code-bot\\tests\\salamand\\g2.md",
"--include=C:\\Users\\zx\\Desktop\\osr\\osr-mono\\packages\\osr-code-bot\\tests\\salamand\\g1.md",
"--dst=./tests/each/${ITEM}.json",
"--disable='npm,terminal,interact,git,search'",
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "tools:test",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"'add a custom help function for yargs, write to src/help.ts, apply in src/main.ts'",
"--logLevel=2",
"--include2='tests/images-random/*.jpg'",
"--include2='src/main.ts'",
"--include2='src/zod_schema.ts'",
"--include='tests/*.mp4'",
"--include='tests/images/*.jpg'",
"--include='C:\\Users\\zx\\Desktop\\osr\\tools-output.json'",
"--include='C:\\Users\\zx\\Desktop\\osr\\osr-code-bot\\docs_\\docker.md'",
"--include='D:\\Users\\mc007\\Desktop\\osr\\osr-search\\types2.js'",
"--disable='npm,terminal,search,interact,git'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**"
],
"outputCapture": "std",
"console": "integratedTerminal", // <= the relevant part
},
{
"type": "node",
"request": "launch",
"name": "images:jpg-svg",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"identify all components (control panel), as markdown table : id, name, description, specs, count, ... - its a plastic extruder using PIDs",
"--logLevel=2",
"--include=./tests/images/lydia-4.5-ex-ballons.jpg",
"--dst=./tests/images/lydia-4.5-ex-cp.md",
"--mode=completion",
"--preferences=none",
"--router2=openai",
"--model2=openai/gpt-4o",
"--disable='npm,terminal,search,interact,git'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "images:jpg-svg-overlay",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"create a svg, with ballons (motor, extrusion screw, control panel elements), to be used as overlay, referring to an external legend",
"--logLevel=2",
"--include=./tests/images/lydia-4.5-ex.jpg",
"--dst=./tests/images/lydia-4.5-ex-ballons.svg",
"--mode=completion",
"--preferences=none",
"--router2=openai",
"--filters=code",
"--model=openai/gpt-4o",
"--disable='npm,terminal,search,interact,git'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "hono",
"skipFiles": [],
"program": "${workspaceFolder}\\main.js",
"outFiles": [
"${workspaceFolder}/**/*.js"
],
"cwd": "${workspaceFolder}",
"args": [
"modify",
"--path='../sw-test'",
"--prompt='take a screenshot (tool capture_screen), store the content as markdown in latest.md'"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/**",
"!**/node_modules/**"
],
"outputCapture": "std"
},
{
"type": "node",
"request": "launch",
"name": "Debug Current Test File",
"autoAttachChildProcesses": true,
"skipFiles": [
"<node_internals>/**",
"**/node_modules/**"
],
"program": "${workspaceRoot}/node_modules/vitest/vitest.mjs",
"args": [
"run",
"${relativeFile}"
],
"smartStep": true,
"console": "integratedTerminal"
}
]
}

209
packages/kbot/README.md Normal file
View File

@ -0,0 +1,209 @@
# @plastichub/kbot
AI-powered command-line tool for code modifications and project management that supports multiple AI models and routers.
## Overview
KBot is a powerful CLI tool that helps developers automate code modifications, handle project management tasks, and integrate with various AI models for intelligent code and content assistance.
## Quick Start
### Installation Steps
KBot requires Node.js to run. It's recommended to use Node.js version 18 or higher.
1. Visit the official [Node.js website](https://nodejs.org/)
2. Download the LTS (Long Term Support) version for your operating system
3. Follow the installation wizard
4. Verify installation by opening a terminal and running:
```bash
node --version
npm --version
```
### API Keys
KBot supports both OpenRouter and OpenAI APIs. You'll need at least one of these set up.
#### OpenRouter API (Recommended)
1. Visit [OpenRouter](https://openrouter.ai/)
2. Sign up for an account
3. Navigate to the API Keys section
4. Create a new API key
#### OpenAI API (Optional)
1. Go to [OpenAI's platform](https://platform.openai.com/)
2. Create an account or sign in
3. Navigate to API keys section
4. Create a new secret key
### Installation using Node NPM package manager
```bash
npm install -g @plastichub/kbot
```
## Configuration
### API Keys Setup
Create configuration at `$HOME/.osr/.config.json` (or export OSR_CONFIG with path to config.json):
```json
{
"openrouter": {
"key": "your-openrouter-key"
},
"openai": {
"key": "your-openai-key"
},
"email": {
"newsletter": {
"host": "host.org",
"port": 465,
"debug": true,
"transactionLog": true,
"auth": {
"user": "foo@bar.com",
"pass": "pass"
}
}
},
"google": {
"cse": "custom search engine id",
"api_key": "google custom search api key"
},
"serpapi": {
"key": "your SerpAPI key (optional, used for web searches(places, google maps))"
},
"deepseek": {
"key": "your DeepSeek API key"
}
}
```
### Preferences Setup
Optionally, create `.kbot/preferences.md` in your project directory to customize AI interactions:
```markdown
## My Preferences
Gender : male
Location : New York, USA (eg: `send me all saunas next to me`)
Language : English
Occupation : software developer, Typescript
Age : 30+
## Contacts
My email address : example@email.com (eg: `send me latest hacker news`)
My wife's email address ("Anne") : example@email.com (eg: `send email to my wife, with latest local news`)
## Content
When creating content
- always Markdown
- always add links
- when sending emails, always add 'Best regards, [Your Name]'
```
## Commands
### Prompt
```kbot "create Astro minimal boilerplate, use starlight theme. Install dependencies via NPM tool"```
### Fetch latest models
```kbot fetch```
### Print examples
```kbot examples```
### Print extended help
```kbot help-md```
### Initialize folder
```kbot init```
# Command Line Parameters
This document describes all available command line parameters.
## Core Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `path` | Target directory | `.` | No |
| `prompt` | The prompt. Supports file paths and environment variables | `./prompt.md` | No |
| `output` | Optional output path for modified files (Tool mode only) | - | No |
| `dst` | Optional destination path for the result, will substitute ${MODEL} and ${ROUTER} in the path. | - | No |
| `model` | AI model to use for processing | `anthropic/claude-3.5-sonnet` | No |
| `router` | Router to use: openai or openrouter | `openrouter` | No |
| `mode` | Chat completion mode: "completion" (without tools) or "tools" | `tools` | No |
## Advanced Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `each` | Target directory | `.` | No |
| `dry` | Dry run - only write out parameters without making API calls | `false` | No |
## File Selection & Tools
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `include` | Glob patterns to match files for processing. Supports multiple patterns, e.g. `--include=src/*.tsx,src/*.ts --include=package.json` | - | No |
| `disable` | Disable tools categories | `[]` | No |
| `disableTools` | List of specific tools to disable | `[]` | No |
## Configuration & Profiles
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `profile` | Path to profile for variables. Supports environment variables | `${POLYMECH-ROOT}/profile.json` | No |
| `env` | Environment (in profile) | `default` | No |
| `config` | Path to JSON configuration file (API keys). Supports environment variables | - | No |
| `preferences` | Path to preferences file (location, email, gender, etc). Supports environment variables | `./.kbot/preferences.md` | No |
## Debugging & Logging
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `logLevel` | Logging level for the application (0-4) | `2` | No |
| `logs` | Logging directory | `./.kbot` | No |
| `dump` | Create a script | - | No |
# Working on Larger Directories
Since LLMs (Large Language Models) and providers are limited to very small 'context windows', it's necessary to feed them with smaller chunks instead. This document explains how to process larger directories efficiently.
## Directory Processing Example
Here's an example of how to walk through files and process them:
```bash
osr-cli each --main='kbot \"read ${KEY} and translate to german, save in docs/language code/filename.md\" --include=\"${REL}\" --include=\".kbot/preferences.md\"' --list="./docs/*.md" --cwd=.
```
### Parameter Explanation
- `each`: Command to process multiple files iteratively
- `--main`: The main command (`kbot`) to execute for each file
- `--include=\"${REL}\"` instructs kbot to include the current selected path
- `--include=\".kbot/preferences.md\"` instructs kbot to include additional preferences about the task (eg: translation specifics)
- `--list`: Specifies the file pattern to match
- Supports include patterns (e.g., `"./docs/*.md"`)
- `--cwd`: Sets the current working directory for the command execution. Default is the current directory (`.`)
**Note** requires `@plastichub/osr-cli-commons` to be installed globally:
```bash
npm i -g @plastichub/osr-cli-commons
```

16
packages/kbot/dist/.npmignore vendored Normal file
View File

@ -0,0 +1,16 @@
# Ignore node_modules directory
node_modules/
# Ignore log files
*.log
# Ignore temporary files
*.tmp
# Ignore coverage reports
coverage/
.kbot
*.exe
package-lock.json
tests

209
packages/kbot/dist/README.md vendored Normal file
View File

@ -0,0 +1,209 @@
# @plastichub/kbot
AI-powered command-line tool for code modifications and project management that supports multiple AI models and routers.
## Overview
KBot is a powerful CLI tool that helps developers automate code modifications, handle project management tasks, and integrate with various AI models for intelligent code and content assistance.
## Quick Start
### Installation Steps
KBot requires Node.js to run. It's recommended to use Node.js version 18 or higher.
1. Visit the official [Node.js website](https://nodejs.org/)
2. Download the LTS (Long Term Support) version for your operating system
3. Follow the installation wizard
4. Verify installation by opening a terminal and running:
```bash
node --version
npm --version
```
### API Keys
KBot supports both OpenRouter and OpenAI APIs. You'll need at least one of these set up.
#### OpenRouter API (Recommended)
1. Visit [OpenRouter](https://openrouter.ai/)
2. Sign up for an account
3. Navigate to the API Keys section
4. Create a new API key
#### OpenAI API (Optional)
1. Go to [OpenAI's platform](https://platform.openai.com/)
2. Create an account or sign in
3. Navigate to API keys section
4. Create a new secret key
### Installation using Node NPM package manager
```bash
npm install -g @plastichub/kbot
```
## Configuration
### API Keys Setup
Create configuration at `$HOME/.osr/.config.json` (or export OSR_CONFIG with path to config.json):
```json
{
"openrouter": {
"key": "your-openrouter-key"
},
"openai": {
"key": "your-openai-key"
},
"email": {
"newsletter": {
"host": "host.org",
"port": 465,
"debug": true,
"transactionLog": true,
"auth": {
"user": "foo@bar.com",
"pass": "pass"
}
}
},
"google": {
"cse": "custom search engine id",
"api_key": "google custom search api key"
},
"serpapi": {
"key": "your SerpAPI key (optional, used for web searches(places, google maps))"
},
"deepseek": {
"key": "your DeepSeek API key"
}
}
```
### Preferences Setup
Optionally, create `.kbot/preferences.md` in your project directory to customize AI interactions:
```markdown
## My Preferences
Gender : male
Location : New York, USA (eg: `send me all saunas next to me`)
Language : English
Occupation : software developer, Typescript
Age : 30+
## Contacts
My email address : example@email.com (eg: `send me latest hacker news`)
My wife's email address ("Anne") : example@email.com (eg: `send email to my wife, with latest local news`)
## Content
When creating content
- always Markdown
- always add links
- when sending emails, always add 'Best regards, [Your Name]'
```
## Commands
### Prompt
```kbot "create Astro minimal boilerplate, use starlight theme. Install dependencies via NPM tool"```
### Fetch latest models
```kbot fetch```
### Print examples
```kbot examples```
### Print extended help
```kbot help-md```
### Initialize folder
```kbot init```
# Command Line Parameters
This document describes all available command line parameters.
## Core Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `path` | Target directory | `.` | No |
| `prompt` | The prompt. Supports file paths and environment variables | `./prompt.md` | No |
| `output` | Optional output path for modified files (Tool mode only) | - | No |
| `dst` | Optional destination path for the result, will substitute ${MODEL} and ${ROUTER} in the path. | - | No |
| `model` | AI model to use for processing | `anthropic/claude-3.5-sonnet` | No |
| `router` | Router to use: openai or openrouter | `openrouter` | No |
| `mode` | Chat completion mode: "completion" (without tools) or "tools" | `tools` | No |
## Advanced Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `each` | Target directory | `.` | No |
| `dry` | Dry run - only write out parameters without making API calls | `false` | No |
## File Selection & Tools
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `include` | Glob patterns to match files for processing. Supports multiple patterns, e.g. `--include=src/*.tsx,src/*.ts --include=package.json` | - | No |
| `disable` | Disable tools categories | `[]` | No |
| `disableTools` | List of specific tools to disable | `[]` | No |
## Configuration & Profiles
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `profile` | Path to profile for variables. Supports environment variables | `${POLYMECH-ROOT}/profile.json` | No |
| `env` | Environment (in profile) | `default` | No |
| `config` | Path to JSON configuration file (API keys). Supports environment variables | - | No |
| `preferences` | Path to preferences file (location, email, gender, etc). Supports environment variables | `./.kbot/preferences.md` | No |
## Debugging & Logging
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `logLevel` | Logging level for the application (0-4) | `2` | No |
| `logs` | Logging directory | `./.kbot` | No |
| `dump` | Create a script | - | No |
# Working on Larger Directories
Since LLMs (Large Language Models) and providers are limited to very small 'context windows', it's necessary to feed them with smaller chunks instead. This document explains how to process larger directories efficiently.
## Directory Processing Example
Here's an example of how to walk through files and process them:
```bash
osr-cli each --main='kbot \"read ${KEY} and translate to german, save in docs/language code/filename.md\" --include=\"${REL}\" --include=\".kbot/preferences.md\"' --list="./docs/*.md" --cwd=.
```
### Parameter Explanation
- `each`: Command to process multiple files iteratively
- `--main`: The main command (`kbot`) to execute for each file
- `--include=\"${REL}\"` instructs kbot to include the current selected path
- `--include=\".kbot/preferences.md\"` instructs kbot to include additional preferences about the task (eg: translation specifics)
- `--list`: Specifies the file pattern to match
- Supports include patterns (e.g., `"./docs/*.md"`)
- `--cwd`: Sets the current working directory for the command execution. Default is the current directory (`.`)
**Note** requires `@plastichub/osr-cli-commons` to be installed globally:
```bash
npm i -g @plastichub/osr-cli-commons
```

View File

@ -0,0 +1,293 @@
{
"timestamp": 1738015382028,
"models": [
{
"id": "gpt-4o-audio-preview-2024-10-01",
"object": "model",
"created": 1727389042,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-audio-preview",
"object": "model",
"created": 1734387424,
"owned_by": "system"
},
{
"id": "gpt-4o-realtime-preview",
"object": "model",
"created": 1727659998,
"owned_by": "system"
},
{
"id": "gpt-4o",
"object": "model",
"created": 1715367049,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-audio-preview-2024-12-17",
"object": "model",
"created": 1734115920,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-realtime-preview",
"object": "model",
"created": 1734387380,
"owned_by": "system"
},
{
"id": "dall-e-2",
"object": "model",
"created": 1698798177,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo",
"object": "model",
"created": 1677610602,
"owned_by": "openai"
},
{
"id": "o1-preview-2024-09-12",
"object": "model",
"created": 1725648865,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-0125",
"object": "model",
"created": 1706048358,
"owned_by": "system"
},
{
"id": "o1-preview",
"object": "model",
"created": 1725648897,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-instruct",
"object": "model",
"created": 1692901427,
"owned_by": "system"
},
{
"id": "babbage-002",
"object": "model",
"created": 1692634615,
"owned_by": "system"
},
{
"id": "o1-mini",
"object": "model",
"created": 1725649008,
"owned_by": "system"
},
{
"id": "o1-mini-2024-09-12",
"object": "model",
"created": 1725648979,
"owned_by": "system"
},
{
"id": "whisper-1",
"object": "model",
"created": 1677532384,
"owned_by": "openai-internal"
},
{
"id": "dall-e-3",
"object": "model",
"created": 1698785189,
"owned_by": "system"
},
{
"id": "gpt-4o-realtime-preview-2024-10-01",
"object": "model",
"created": 1727131766,
"owned_by": "system"
},
{
"id": "gpt-4-1106-preview",
"object": "model",
"created": 1698957206,
"owned_by": "system"
},
{
"id": "omni-moderation-latest",
"object": "model",
"created": 1731689265,
"owned_by": "system"
},
{
"id": "omni-moderation-2024-09-26",
"object": "model",
"created": 1732734466,
"owned_by": "system"
},
{
"id": "tts-1-hd-1106",
"object": "model",
"created": 1699053533,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-2024-07-18",
"object": "model",
"created": 1721172717,
"owned_by": "system"
},
{
"id": "gpt-4",
"object": "model",
"created": 1687882411,
"owned_by": "openai"
},
{
"id": "tts-1-hd",
"object": "model",
"created": 1699046015,
"owned_by": "system"
},
{
"id": "davinci-002",
"object": "model",
"created": 1692634301,
"owned_by": "system"
},
{
"id": "text-embedding-ada-002",
"object": "model",
"created": 1671217299,
"owned_by": "openai-internal"
},
{
"id": "gpt-4-turbo",
"object": "model",
"created": 1712361441,
"owned_by": "system"
},
{
"id": "tts-1",
"object": "model",
"created": 1681940951,
"owned_by": "openai-internal"
},
{
"id": "tts-1-1106",
"object": "model",
"created": 1699053241,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-instruct-0914",
"object": "model",
"created": 1694122472,
"owned_by": "system"
},
{
"id": "gpt-4-0125-preview",
"object": "model",
"created": 1706037612,
"owned_by": "system"
},
{
"id": "gpt-4-turbo-preview",
"object": "model",
"created": 1706037777,
"owned_by": "system"
},
{
"id": "gpt-4o-mini-realtime-preview-2024-12-17",
"object": "model",
"created": 1734112601,
"owned_by": "system"
},
{
"id": "gpt-4o-audio-preview",
"object": "model",
"created": 1727460443,
"owned_by": "system"
},
{
"id": "gpt-4-0613",
"object": "model",
"created": 1686588896,
"owned_by": "openai"
},
{
"id": "gpt-4o-2024-05-13",
"object": "model",
"created": 1715368132,
"owned_by": "system"
},
{
"id": "text-embedding-3-small",
"object": "model",
"created": 1705948997,
"owned_by": "system"
},
{
"id": "gpt-4-turbo-2024-04-09",
"object": "model",
"created": 1712601677,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-1106",
"object": "model",
"created": 1698959748,
"owned_by": "system"
},
{
"id": "gpt-3.5-turbo-16k",
"object": "model",
"created": 1683758102,
"owned_by": "openai-internal"
},
{
"id": "gpt-4o-audio-preview-2024-12-17",
"object": "model",
"created": 1734034239,
"owned_by": "system"
},
{
"id": "gpt-4o-realtime-preview-2024-12-17",
"object": "model",
"created": 1733945430,
"owned_by": "system"
},
{
"id": "gpt-4o-mini",
"object": "model",
"created": 1721172741,
"owned_by": "system"
},
{
"id": "text-embedding-3-large",
"object": "model",
"created": 1705953180,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-08-06",
"object": "model",
"created": 1722814719,
"owned_by": "system"
},
{
"id": "gpt-4o-2024-11-20",
"object": "model",
"created": 1731975040,
"owned_by": "system"
},
{
"id": "chatgpt-4o-latest",
"object": "model",
"created": 1723515131,
"owned_by": "system"
}
]
}

File diff suppressed because it is too large Load Diff

3
packages/kbot/dist/main_node.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,173 @@
/*!
* prr
* (c) 2013 Rod Vagg <rod@vagg.org>
* https://github.com/rvagg/prr
* License: MIT
*/
/*!
*
* Copyright 2009-2017 Kris Kowal under the terms of the MIT
* license found at https://github.com/kriskowal/q/blob/v1/LICENSE
*
* With parts by Tyler Close
* Copyright 2007-2009 Tyler Close under the terms of the MIT X license found
* at http://www.opensource.org/licenses/mit-license.html
* Forked at ref_send.js version: 2009-05-11
*
* With parts by Mark Miller
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/*!
* Tmp
*
* Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
*
* MIT Licensed
*/
/*!
* fill-range <https://github.com/jonschlinkert/fill-range>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* glob-base <https://github.com/jonschlinkert/glob-base>
*
* Copyright (c) 2015, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* humanize-ms - index.js
* Copyright(c) 2014 dead_horse <dead_horse@qq.com>
* MIT Licensed
*/
/*!
* is-dotfile <https://github.com/jonschlinkert/is-dotfile>
*
* Copyright (c) 2015-2017, Jon Schlinkert.
* Released under the MIT License.
*/
/*!
* is-extglob <https://github.com/jonschlinkert/is-extglob>
*
* Copyright (c) 2014-2015, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* is-extglob <https://github.com/jonschlinkert/is-extglob>
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* is-glob <https://github.com/jonschlinkert/is-glob>
*
* Copyright (c) 2014-2015, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* is-glob <https://github.com/jonschlinkert/is-glob>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
/*!
* is-number <https://github.com/jonschlinkert/is-number>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Released under the MIT License.
*/
/*!
* mime-db
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015-2022 Douglas Christopher Wilson
* MIT Licensed
*/
/*!
* mime-types
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
/*!
* parse-glob <https://github.com/jonschlinkert/parse-glob>
*
* Copyright (c) 2015, Jon Schlinkert.
* Licensed under the MIT License.
*/
/*!
* to-regex-range <https://github.com/micromatch/to-regex-range>
*
* Copyright (c) 2015-present, Jon Schlinkert.
* Released under the MIT License.
*/
/*! Based on fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> & David Frank */
/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
/*! queue-microtask. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
/*! run-parallel. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
/*! safe-buffer. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
/*! showdown v 2.1.0 - 21-04-2022 */
/*!*/
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @license
* web-streams-polyfill v4.0.0-beta.3
* Copyright 2021 Mattias Buelens, Diwank Singh Tomer and other contributors.
* This code is released under the MIT license.
* SPDX-License-Identifier: MIT
*/
/**
* @preserve
* JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013)
*
* @author <a href="mailto:jensyt@gmail.com">Jens Taylor</a>
* @see http://github.com/homebrewing/brauhaus-diff
* @author <a href="mailto:gary.court@gmail.com">Gary Court</a>
* @see http://github.com/garycourt/murmurhash-js
* @author <a href="mailto:aappleby@gmail.com">Austin Appleby</a>
* @see http://sites.google.com/site/murmurhash/
*/

1345
packages/kbot/dist/package-lock.json generated vendored Normal file

File diff suppressed because it is too large Load Diff

20
packages/kbot/dist/package.json vendored Normal file
View File

@ -0,0 +1,20 @@
{
"name": "@plastichub/kbot",
"version": "1.1.15",
"main": "main_node.js",
"author": "",
"license": "ISC",
"description": "",
"bin": {
"kbot": "./main_node.js"
},
"dependencies": {
"node-emoji": "^2.2.0"
},
"publishConfig": {
"access": "public"
},
"optionalDependencies": {
"puppeteer": "^23.11.1"
}
}

196
packages/kbot/dist/stats/statistics.html vendored Normal file

File diff suppressed because one or more lines are too long

21
packages/kbot/docs/.gitignore vendored Normal file
View File

@ -0,0 +1,21 @@
# build output
dist/
# generated types
.astro/
# dependencies
node_modules/
# logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# environment variables
.env
.env.production
# macOS-specific files
.DS_Store

View File

@ -0,0 +1,4 @@
{
"recommendations": ["astro-build.astro-vscode"],
"unwantedRecommendations": []
}

11
packages/kbot/docs/.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,11 @@
{
"version": "0.2.0",
"configurations": [
{
"command": "./node_modules/.bin/astro dev",
"name": "Development server",
"request": "launch",
"type": "node-terminal"
}
]
}

View File

@ -0,0 +1,54 @@
# Starlight Starter Kit: Basics
[![Built with Starlight](https://astro.badg.es/v2/built-with-starlight/tiny.svg)](https://starlight.astro.build)
```
npm create astro@latest -- --template starlight
```
[![Open in StackBlitz](https://developer.stackblitz.com/img/open_in_stackblitz.svg)](https://stackblitz.com/github/withastro/starlight/tree/main/examples/basics)
[![Open with CodeSandbox](https://assets.codesandbox.io/github/button-edit-lime.svg)](https://codesandbox.io/p/sandbox/github/withastro/starlight/tree/main/examples/basics)
[![Deploy to Netlify](https://www.netlify.com/img/deploy/button.svg)](https://app.netlify.com/start/deploy?repository=https://github.com/withastro/starlight&create_from_path=examples/basics)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fwithastro%2Fstarlight%2Ftree%2Fmain%2Fexamples%2Fbasics&project-name=my-starlight-docs&repository-name=my-starlight-docs)
> 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun!
## 🚀 Project Structure
Inside of your Astro + Starlight project, you'll see the following folders and files:
```
.
├── public/
├── src/
│ ├── assets/
│ ├── content/
│ │ ├── docs/
│ └── content.config.ts
├── astro.config.mjs
├── package.json
└── tsconfig.json
```
Starlight looks for `.md` or `.mdx` files in the `src/content/docs/` directory. Each file is exposed as a route based on its file name.
Images can be added to `src/assets/` and embedded in Markdown with a relative link.
Static assets, like favicons, can be placed in the `public/` directory.
## 🧞 Commands
All commands are run from the root of the project, from a terminal:
| Command | Action |
| :------------------------ | :----------------------------------------------- |
| `npm install` | Installs dependencies |
| `npm run dev` | Starts local dev server at `localhost:4321` |
| `npm run build` | Build your production site to `./dist/` |
| `npm run preview` | Preview your build locally, before deploying |
| `npm run astro ...` | Run CLI commands like `astro add`, `astro check` |
| `npm run astro -- --help` | Get help using the Astro CLI |
## 👀 Want to learn more?
Check out [Starlight's docs](https://starlight.astro.build/), read [the Astro documentation](https://docs.astro.build), or jump into the [Astro Discord server](https://astro.build/chat).

View File

@ -0,0 +1,30 @@
// @ts-check
import { defineConfig } from 'astro/config';
import starlight from '@astrojs/starlight';
// Sidebar navigation: an explicit entry for the guides section, plus
// auto-generated sections built from the reference and meta directories.
const sidebar = [
  {
    label: 'Guides',
    items: [
      // Each item here is one entry in the navigation menu.
      { label: 'Example Guide', slug: 'guides/example' },
    ],
  },
  { label: 'Reference', autogenerate: { directory: 'reference' } },
  { label: 'Meta', autogenerate: { directory: 'meta' } },
];

// Starlight documentation theme, configured with the site title,
// a GitHub social link, and the sidebar defined above.
const docsTheme = starlight({
  title: 'My Docs',
  social: {
    github: 'https://github.com/withastro/starlight',
  },
  sidebar,
});

export default defineConfig({
  integrations: [docsTheme],
});

6964
packages/kbot/docs/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,17 @@
{
"name": "docs2",
"type": "module",
"version": "0.0.1",
"scripts": {
"dev": "astro dev",
"start": "astro dev",
"build": "astro build",
"preview": "astro preview",
"astro": "astro"
},
"dependencies": {
"@astrojs/starlight": "^0.31.1",
"astro": "^5.1.5",
"sharp": "^0.32.5"
}
}

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill-rule="evenodd" d="M81 36 64 0 47 36l-1 2-9-10a6 6 0 0 0-9 9l10 10h-2L0 64l36 17h2L28 91a6 6 0 1 0 9 9l9-10 1 2 17 36 17-36v-2l9 10a6 6 0 1 0 9-9l-9-9 2-1 36-17-36-17-2-1 9-9a6 6 0 1 0-9-9l-9 10v-2Zm-17 2-2 5c-4 8-11 15-19 19l-5 2 5 2c8 4 15 11 19 19l2 5 2-5c4-8 11-15 19-19l5-2-5-2c-8-4-15-11-19-19l-2-5Z" clip-rule="evenodd"/><path d="M118 19a6 6 0 0 0-9-9l-3 3a6 6 0 1 0 9 9l3-3Zm-96 4c-2 2-6 2-9 0l-3-3a6 6 0 1 1 9-9l3 3c3 2 3 6 0 9Zm0 82c-2-2-6-2-9 0l-3 3a6 6 0 1 0 9 9l3-3c3-2 3-6 0-9Zm96 4a6 6 0 0 1-9 9l-3-3a6 6 0 1 1 9-9l3 3Z"/><style>path{fill:#000}@media (prefers-color-scheme:dark){path{fill:#fff}}</style></svg>

After

Width:  |  Height:  |  Size: 696 B

View File

@ -0,0 +1,3 @@
kbotd --prompt="./.kbot/docs.md"

View File

@ -0,0 +1,10 @@
kbotd modify \
--path=. \
--prompt="./.kbot/todos.md" \
--mode=completion \
--router2=openai \
--model=openai/gpt-4-32k \
--include2="src/commands/run.ts" \
--include2="src/commands/run-tools.ts" \
--disable="npm,terminal,git,user,search,email" \
--dst="./.kbot/todos-log.md"

Binary file not shown.

After

Width:  |  Height:  |  Size: 96 KiB

View File

@ -0,0 +1,7 @@
import { defineCollection } from 'astro:content';
import { docsLoader } from '@astrojs/starlight/loaders';
import { docsSchema } from '@astrojs/starlight/schema';
// Register the Starlight "docs" content collection: entries are loaded
// by docsLoader() and validated against the stock docsSchema().
const docs = defineCollection({ loader: docsLoader(), schema: docsSchema() });

export const collections = { docs };

View File

@ -0,0 +1,11 @@
---
title: Example Guide
description: A guide in my new Starlight docs site.
---
Guides lead a user through a specific task they want to accomplish, often with a sequence of steps.
Writing a good guide requires thinking about what your users are trying to do.
## Further reading
- Read [about how-to guides](https://diataxis.fr/how-to-guides/) in the Diátaxis framework

View File

@ -0,0 +1,36 @@
---
title: Welcome to Starlight
description: Get started building your docs site with Starlight.
template: splash
hero:
tagline: Congrats on setting up a new Starlight project!
image:
file: ../../assets/houston.webp
actions:
- text: Example Guide
link: /guides/example/
icon: right-arrow
- text: Read the Starlight docs
link: https://starlight.astro.build
icon: external
variant: minimal
---
import { Card, CardGrid } from '@astrojs/starlight/components';
## Next steps
<CardGrid stagger>
<Card title="Update content" icon="pencil">
Edit `src/content/docs/index.mdx` to see this page change.
</Card>
<Card title="Add new content" icon="add-document">
Add Markdown or MDX files to `src/content/docs` to create new pages.
</Card>
<Card title="Configure your site" icon="setting">
Edit your `sidebar` and other config in `astro.config.mjs`.
</Card>
<Card title="Read the docs" icon="open-book">
Learn more in [the Starlight Docs](https://starlight.astro.build/).
</Card>
</CardGrid>

View File

@ -0,0 +1,39 @@
---
title: "The Future of Collaboration: A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
## The Future of Collaboration
The evolution of content creation and consumption is set to become increasingly collaborative, moving beyond solitary endeavors to foster community-driven innovation and productivity. This transformation is supported by a range of tools and technologies designed to enhance collaborative efforts across various platforms.
### Real-Time Co-Editing
One of the key advancements in collaboration will be the ability for multiple users to seamlessly edit documents in real time. This feature, already being refined by platforms like [Google Docs](https://www.google.com/docs/about/) and [Microsoft Office 365](https://www.office.com/), minimizes barriers to teamwork and boosts efficiency by enabling contributors to see and respond to each other's changes instantly. The co-editing capability is augmented by features like version history and revision tracking, which provide transparency and accountability.
### Contextual Awareness
As collaborators engage with shared content, systems will provide them with insights into others' modifications without overwhelming them with information. Applications such as [Slack](https://slack.com/) and [Microsoft Teams](https://www.microsoft.com/en-us/microsoft-teams/group-chat-software) are developing features that highlight relevant changes and comments within the context of ongoing projects. This capability ensures a synchronized understanding across teams and reduces the potential for conflicts arising from miscommunication.
### Automated Synchronization
Future workflows will increasingly depend on automated synchronization across platforms and devices. Services like [Dropbox](https://www.dropbox.com/) and [OneDrive](https://onedrive.live.com/) are already facilitating this by ensuring that the latest versions of content are accessible from any location or device. As this synchronization becomes more seamless, users will benefit from uninterrupted access to updated information, regardless of their active device.
### Intelligent Conflict Resolution
Artificial Intelligence will play a crucial role in managing collaborative spaces by offering solutions for resolving conflicts that arise from simultaneous content modifications. Tools such as [Atlassian Confluence](https://www.atlassian.com/software/confluence) are beginning to integrate AI-driven suggestions for managing these conflicts, providing users with merge suggestions or automated conflict resolution options. This eases user interaction and helps maintain content integrity while supporting fluid collaboration.
### Project Management Integration
Collaboration in content creation is further enhanced by integration with project management tools that align with team workflows. Platforms like [Asana](https://asana.com/) and [Trello](https://trello.com/) offer functionalities where content collaboration can be managed alongside task assignments, deadlines, and progress tracking. These integrations help teams stay organized, ensure accountability, and streamline project delivery by tying collaborative content efforts directly to broader project goals.
### Open Collaboration and Contribution Models
The future of collaboration is also leaning towards openness, where content creation taps into wider community inputs. Platforms such as [GitHub](https://github.com/) exemplify this trend by allowing open contributions to have structured peer reviews and collaborative improvements. This model not only enhances the quality of output through diverse insights but also accelerates innovation by pooling a wider range of expertise and creativity.
### Collaborative Learning and Knowledge Sharing
As more integrated collaboration tools emerge, they will promote knowledge sharing and continuous learning within and across organizations. Platforms like [Notion](https://www.notion.so/) and [Confluence](https://www.atlassian.com/software/confluence) are creating collaborative spaces where users can share knowledge, create wikis, and build living documents that evolve with team input. These tools facilitate a culture of learning and adaptation, ensuring that information sharing becomes an integral part of the collaborative process.
By leveraging these collaborative advancements, organizations can break down silos, encourage innovation, and build dynamic content ecosystems that are adaptable, intuitive, and reflective of collective intelligence. This shift will be crucial to meet the demands of an increasingly interconnected and collaborative digital world.

View File

@ -0,0 +1,95 @@
---
title: "The Future of Files and Content: A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
## The Evolving Nature of Files and Content
As we look toward the next decade, the concept of "files" as we know them is poised for a dramatic transformation. The traditional notion of discrete, self-contained units of data is evolving into something far more fluid, contextual, and interconnected. This evolution is driven by advancements in technology, changing user expectations, and the increasing complexity of information ecosystems.
Files have historically been defined by their boundaries—specific containers of data isolated by format, location, and context. However, as technology progresses, this rigid structure is being dismantled in favor of more dynamic and flexible data models. Future files will encapsulate content that seamlessly integrates across applications, platforms, and devices, allowing for a more cohesive digital experience.
The shift is not just technical but conceptual, as it reflects a broader understanding of information management. In practice, this means transcending the limitations of traditional file systems to embrace structures that prioritize user context, behavioral insights, and multidimensional data relationships.
## The Decline of Traditional File Systems
In the coming years, we'll likely see a gradual shift away from traditional hierarchical file systems. The rigid tree-like structures of directories and folders will give way to more advanced systems optimized for accessibility and adaptability, emphasizing content's intrinsic value over its mere location. Here are key elements of this transformation:
- **Content-Centric Storage**: Future storage architectures will prioritize the meaning and context of information. By classifying data based on its inherent properties and usage patterns rather than its physical location, users can retrieve and interact with content based on relevance. This approach leverages metadata, semantic analysis, and user habits to create intuitive and personalized storage environments.
- **Fluid Documents**: The concept of documents is expanding to encompass living, evolving entities that can exist in multiple states and versions simultaneously. These documents will not be tied to a single format or static representation but will adapt fluidly to the context in which they are accessed, offering users the most pertinent and updated view at any moment.
- **Dynamic Composition**: With dynamic composition, content can assemble itself from various sources in real-time, tailored to specific user needs or contextual triggers. This capability transforms the static consumption of information into a continuously adaptable and interactive experience, ensuring that users receive the most relevant and complete narrative.
## The Rise of Intelligent Annotations
One of the most significant developments in the next decade will be the evolution of annotations. No longer confined to the margins or attached in static form, annotations will become integral to digital content, offering layers of intelligence, interactivity, and customization.
### 1. Contextually Aware
Annotations will transcend simple text notes, evolving into systems that understand and interact with their environment. They will:
- Analyze relationships not only with the underlying content but also with other annotations and external data sources. This interconnectedness will enable richer narratives and insights derived from a web of contextually relevant information.
- Integrate with user behavior and preferences to provide personalized experiences. By learning from user interactions and historical data, annotations will adapt their presentation and functionality to align with individual needs and expectations, enhancing user engagement.
### 2. Interactive and Dynamic
The transformation of annotations will see them evolve from static marks to complex, interactive ecosystems. Future annotations will:
- Act as interactive layers that provide deeper insights or auxiliary content upon engagement. They transform a document into an exploratory landscape, whereby users can uncover supplementary data or functionality as needed.
- Update dynamically to reflect new information, ensuring that annotations and the content they enhance remain current and accurate. AI-driven mechanisms can automatically incorporate updates or revisions pertinent to the annotation context.
- Spur collaboration by serving as arenas for discussion and idea exchange. Annotations will support real-time collaboration, allowing multiple users to contribute, comment, and modify information within a shared digital space.
### 3. Semantically Rich Metadata
Annotations, enriched with semantics, will become pivotal to understanding content in depth. They will:
- Encode structured data that artificial intelligence systems can process, enabling advanced analysis and inference. This will enhance machine understanding of content contexts and relationships, facilitating more effective automation and decision-making processes.
- Establish links to related concepts and resources, building rich networks of content that offer diverse perspectives and supplemental information.
- Include comprehensive version history and provenance details to ensure transparency and accountability. Users will be able to trace the evolution of annotations and their impacts on the primary content.
- Carry contextual metadata that describes usage patterns, relevancy, and interaction history, enabling future systems to fine-tune experiences based on aggregated insights.
## The Future of Collaboration
Content creation and consumption will become increasingly collaborative, moving beyond isolated experiences to foster community-driven innovation and productivity.
- **Real-Time Co-Editing**: Future collaborative processes will benefit from seamless and simultaneous multi-user editing capabilities. This real-time interaction will reduce barriers to teamwork and increase efficiency, allowing contributors to see and respond to changes instantly.
- **Contextual Awareness**: As collaborators work on shared content, systems will provide awareness of others' modifications without overwhelming users. This will create a synchronized understanding across teams and minimize conflicts by highlighting relevant changes and comments in context.
- **Automated Synchronization**: Professional and personal workflows will increasingly rely on automated, cross-platform synchronization. Data will migrate fluidly across devices—ensuring that users have access to the latest versions of content regardless of their active device or location.
- **Intelligent Conflict Resolution**: AI will mediate collaborative spaces, providing smart solutions to resolve conflicts that arise from simultaneous content modifications. These systems will offer conflict suggestions or merge decisions, simplifying user interaction and maintaining content integrity.
## The Role of AI in Content Management
Artificial Intelligence will be pivotal in revolutionizing content management systems, offering capabilities that enhance organizational efficiency, user experience, and adaptability.
1. **Content Organization**
- AI systems will autonomously categorize content by analyzing its semantic properties, usage patterns, and potential relationships, streamlining how information is stored and retrieved.
   - Intelligent tagging will replace manual labeling, so that content is associated with context-aware tags assigned automatically based on content semantics and usage context.
   - Contextual search mechanisms will leverage AI to anticipate user intentions and present the most relevant results quickly, intelligently synthesizing user needs and search history.
2. **Content Generation**
- Automated summarization tools will enable users to distill vast amounts of information into concise, insightful overviews, facilitating faster understanding and decision-making.
- Systems will analyze content contexts to offer suggestions or enhancements tailored to user objectives and situational demands.
- Dynamic content adaptation will adjust narratives or presentations based on real-time factors such as audience, platform, and device preferences.
## Privacy and Security Considerations
As content becomes more interconnected, new challenges will emerge that necessitate innovative solutions to safeguard user privacy and content integrity.
- **Granular Access Control**: Future systems will need robust access management tools to define user permissions at more granular levels, ensuring that different content aspects are accessible according to precise security roles and protocols.
- **Encrypted Annotations**: Annotations will incorporate cryptographic measures to secure data while allowing authorized collaboration. This encryption ensures privacy while maintaining the flexibility of sharing and editing within trusted communities.
- **Blockchain-Based Verification**: Content authenticity and integrity will be enhanced through blockchain technology, offering decentralized and tamper-proof means to verify information provenance and historical modifications, increasing trust in digital content.
## Conclusion
The next decade will see a fundamental rethinking of how we create, store, and interact with content. The future of files lies not in their traditional, static form, but in a more dynamic, interconnected, and intelligent ecosystem of information. This vision is underpinned by the transformative role of intelligent annotations, AI-driven content management, and evolving paradigms that prioritize meaning, context, and collaboration. By embracing these changes, we can unlock deeper insights, nurture innovation, and foster richer digital experiences that keep pace with an ever-changing world.

View File

@ -0,0 +1,78 @@
---
title: "The Future of Collaboration: A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
Predicting the future of humanity over the next 10 years involves considering current trends, technological advancements, geopolitical dynamics, and environmental challenges. Here's a forecast based on plausible trajectories:
### **1. Technological Advancements**
- **Artificial Intelligence (AI):** AI will become deeply integrated into daily life, transforming industries like healthcare, education, and transportation. Ethical concerns and regulations around AI will grow.
- **Quantum Computing:** Early-stage quantum computers may solve complex problems in fields like cryptography, materials science, and drug discovery.
- **Biotechnology:** Advances in gene editing (e.g., CRISPR) and personalized medicine will revolutionize healthcare, potentially curing genetic diseases and extending lifespans.
- **Space Exploration:** Private companies and governments will expand space exploration, with missions to the Moon, Mars, and beyond. Space tourism may become more accessible.
---
### **2. Climate Change and Sustainability**
- **Climate Crisis:** The effects of climate change will intensify, with more frequent extreme weather events, rising sea levels, and biodiversity loss. Global efforts to mitigate these impacts will accelerate.
- **Renewable Energy:** Solar, wind, and other renewable energy sources will dominate new energy investments, reducing reliance on fossil fuels.
- **Circular Economy:** Sustainable practices and circular economy models will gain traction, reducing waste and promoting resource efficiency.
---
### **3. Geopolitical Shifts**
- **Power Dynamics:** The U.S., China, and the EU will remain major global powers, but emerging economies like India and Brazil will play larger roles in international affairs.
- **Conflict and Cooperation:** Tensions over resources, technology, and territorial disputes may rise, but global cooperation on issues like climate change and pandemics will also increase.
- **Globalization vs. Localization:** The world may see a balance between globalization and localization, with countries focusing on self-sufficiency in critical areas like food and energy.
---
### **4. Social and Cultural Changes**
- **Demographics:** Aging populations in developed countries and youth bulges in developing nations will shape economic and social policies.
- **Work and Automation:** Automation will disrupt traditional jobs, leading to shifts in the workforce and the rise of new industries. Universal Basic Income (UBI) may be tested in more countries.
- **Inequality:** Economic inequality could widen, but social movements and policies may address disparities in wealth, education, and healthcare.
---
### **5. Health and Well-being**
- **Pandemic Preparedness:** Lessons from COVID-19 will lead to better global health infrastructure and faster responses to future pandemics.
- **Mental Health:** Awareness and treatment of mental health issues will improve, driven by technology and societal acceptance.
- **Longevity:** Advances in medicine and lifestyle changes will increase life expectancy, but aging populations will pose challenges for healthcare systems.
---
### **6. Environmental and Ethical Challenges**
- **Biodiversity Loss:** Efforts to protect endangered species and ecosystems will intensify, but habitat destruction and climate change will remain threats.
- **Ethical Dilemmas:** Debates over AI ethics, genetic engineering, and data privacy will shape policies and societal norms.
---
### **7. Global Connectivity**
- **Digital Divide:** Efforts to bridge the digital divide will expand internet access to underserved regions, empowering communities and driving economic growth.
- **Virtual Reality (VR) and Augmented Reality (AR):** These technologies will transform entertainment, education, and remote work, creating new opportunities and challenges.
---
### **8. Cultural Evolution**
- **Diversity and Inclusion:** Movements for racial, gender, and social equality will continue to shape societies, leading to more inclusive policies and practices.
- **Global Culture:** The blending of cultures through technology and migration will create a more interconnected and diverse global society.
---
### **9. Economic Trends**
- **Green Economy:** Investments in sustainable industries will drive economic growth, creating new jobs and opportunities.
- **Cryptocurrency and Blockchain:** Digital currencies and blockchain technology will gain wider acceptance, potentially transforming financial systems.
---
### **10. Existential Risks**
- **Nuclear Threats:** Geopolitical tensions could increase the risk of nuclear conflict, though disarmament efforts may mitigate this.
- **AI and Biosecurity:** The misuse of AI and biotechnology could pose significant risks, requiring robust governance and international cooperation.
---
### **Conclusion**
The next 10 years will likely be a period of rapid change, marked by both challenges and opportunities. Humanity's ability to address global issues like climate change, inequality, and technological disruption will determine the trajectory of our future. Collaboration, innovation, and ethical leadership will be critical in shaping a sustainable and equitable world.

View File

@ -0,0 +1,41 @@
---
title: "The Future of Collaboration A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
## Forecasting Humanity in 10 Years (2033): A Glimpse into the Future
Forecasting the future is a complex exercise, but by analyzing current trends and emerging technologies, we can create a plausible scenario for humanity in 10 years.
**1. Technological Advancements:**
* **Artificial Intelligence (AI) Pervasiveness:** AI will be deeply integrated into daily life, automating tasks, personalizing experiences, and driving innovation across industries like healthcare, finance, and transportation. Expect more sophisticated AI assistants, personalized medicine, and autonomous vehicles.
* **Ubiquitous Connectivity:** The Internet of Things (IoT) will expand, connecting billions of devices and creating smart homes, cities, and industries. This hyper-connectivity will improve efficiency, resource management, and convenience.
* **Biotechnology and Gene Editing:** Advancements in gene editing technologies like CRISPR will likely lead to breakthroughs in disease treatment and prevention, potentially revolutionizing healthcare. Ethical debates around genetic modification will continue.
* **Sustainable Technologies:** The urgency of climate change will accelerate the development and adoption of renewable energy sources, energy storage solutions, and sustainable agriculture practices.
* **Space Exploration:** Commercial space travel will become more common, with potential for space tourism and resource extraction. Space agencies will continue exploring Mars and other celestial bodies.
**2. Societal Shifts:**
* **Demographic Changes:** The global population will continue to age, leading to challenges and opportunities in healthcare, social security, and workforce dynamics.
* **Urbanization:** Cities will continue to grow, demanding innovative solutions for housing, transportation, and resource management. Smart city initiatives will gain traction.
* **Changing Work Landscape:** Automation and AI will reshape the job market, requiring workers to adapt and acquire new skills. The gig economy and remote work will likely become even more prevalent.
* **Increased Social Awareness:** Social movements advocating for equality, inclusivity, and environmental protection will likely gain momentum, influencing policy and corporate behavior.
* **Geopolitical Landscape:** Global power dynamics will continue to shift, with potential for new alliances and collaborations, as well as increased competition in areas like technology and resources.
**3. Potential Challenges:**
* **Climate Change Impacts:** Extreme weather events, rising sea levels, and resource scarcity will pose significant challenges to communities and economies worldwide.
* **Cybersecurity Threats:** With increased connectivity and reliance on technology, cybersecurity threats will become more sophisticated, demanding robust defenses and international cooperation.
* **Social Inequality:** The benefits of technological advancements may not be evenly distributed, leading to a widening gap in wealth and opportunity if not addressed proactively.
* **Ethical Dilemmas:** The rapid pace of technological change will raise ethical dilemmas related to AI, gene editing, data privacy, and automation, requiring careful consideration and regulation.
**Conclusion:**
The next 10 years promise exciting advancements and transformative changes across various aspects of human life. However, navigating the challenges and ensuring a sustainable and equitable future will require collaboration, innovation, and responsible stewardship of technology and resources.
**Disclaimer:** This is a speculative forecast based on current trends and expert predictions. Unforeseen events and breakthroughs could significantly alter the trajectory of human development.

View File

@ -0,0 +1,34 @@
---
title: "The Future of Collaboration A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
**Systemic Analysis and Forecast for Humanity over the Next 10 Years (2023-2033)**
This assessment considers various factors affecting humanity, such as technological advancements, environmental shifts, economic trends, and social transformations. The forecast provided here is based on patterns and predictions observed within the realm of known technological, sociological, and environmental data up to early 2023.
**Economic Trends:**
1. **Global Debt Crisis**: Increased global debt levels, accompanied by rising interest rates and stagnant economic growth, might trigger a crisis, leading to potential global recession or another financial crisis within critical economic sectors by the end of 2030.
2. **Accelerated Digital Transformation**: The global shift towards a digital economy is expected to continue at an unprecedented rate, placing companies that fail to adapt at significant risk. This shift towards a digital first world will significantly boost economic growth but equally exacerbate income inequality.
3. **The Rise of Emerging Markets**: Continuation of emerging markets growth in Asia, India, and elsewhere will challenge the world order by giving them significant political and economic influence.
**Technological Advancements:**
1. **Advancements in Renewable Energy**: Expectations are that the push towards renewable energy sources will accelerate over the next decade, driven in large part by non-governmental organizations (NGOs) and international endeavors.
2. **Enhanced AI Integration**: As the pace of AI development accelerates, widespread adoption of AI technologies into daily life will drive workforce transformation and blur the traditional division between humans and machines.
3. **Quantum Computing Breakthroughs**: By 2030, breakthroughs in quantum computing will become evident, further advancing compute power while significantly changing every frontier, impacting societal architecture, including economic systems.
**Environmental Shifts:**
1. **Accelerated Climate Change**: Despite mitigation efforts, the likelihood of experiencing more years like 2022 (one of the three hottest years on record) necessitates significant immediate action, and potentially new global commitments to adhere to and strengthen environmental objectives.
2. **Water Resource Challenges**: Continuing drying trends in the Middle East, coupled with significant risks of drought due to climate change, suggest that water will become an area of human sustainment focus.
3. **Massive Migration and Conflict**: Under current projections, climate displacement will increase significantly, exacerbating the global strain on resources and heightening the risk of conflict.
**Social Transformations:**
1. **Rise of Social Movements and Activism**: The next decade will witness a strong rise in social activism and movements focused on various causes, including climate change, social injustice, and human rights.
2. **Space Exploration and Colonization**: Following advances in private space entrepreneurship and the renewal of public investment in space exploration, human presence off Earth is expected to grow year over year.
3. **Intensification of Mental and Physical Health Issues**: With rising stress and pressure on physical and mental well-being from societal, technological, and environmental factors, the next decade may see these problems grow foreseeably worse.

View File

@ -0,0 +1,145 @@
---
title: "The Future of Collaboration A 10-Year Outlook"
date: 2024-01-29
draft: false
tags: ["future", "content", "files", "annotations"]
---
When looking at the next decade (or beyond), **capitalism, greed, and fascism** (or any authoritarian movement) are all deeply interwoven with broader social, political, and economic currents. Predicting exactly how they'll evolve is complex, but here are some general considerations and possible trajectories:
---
## 1. Capitalism Under Strain
1. **Rising Inequality**
- As technology advances (automation, AI), wealth often concentrates in the hands of those who control capital, patents, and data.
- If this concentration continues unchecked, it can fuel resentment, social unrest, or populist backlash.
2. **Climate Crisis Pressures**
- The costs of mitigating and adapting to climate change could stress the traditional capitalist model. Governments may be forced to intervene more aggressively in markets (e.g., carbon taxes, green subsidies, stricter regulations).
- Corporations may adapt by presenting themselves as “green” or “sustainable,” but critics argue this can be superficial if profit remains the primary driver.
3. **Possible Reforms or Transitions**
- Some regions might shift toward more regulated or “stakeholder” forms of capitalism, where social and environmental considerations become part of the bottom line.
- Experiments with universal basic income, wealth taxes, or new social safety nets might emerge in response to automation and inequality.
---
## 2. Greed & Concentration of Power
1. **Corporate Power**
- If huge multinational firms continue to outgrow or outmaneuver government regulations, we could see more “corporate states” wielding influence across borders.
- Monopolistic or oligopolistic markets may lock in consumers, limiting competition and innovation.
2. **Tech Billionaires & Influence**
- Individual tech magnates can exert enormous influence over policy, media, and public discourse (think of social media platforms, private space ventures, etc.).
- Public backlash against perceived “tech oligarchs” might spark new regulatory pushes or social movements demanding accountability.
---
## 3. Authoritarian & Fascist Currents
1. **Populist Nationalism**
- Economic frustration—especially if tied to unemployment, rising living costs, or cultural change—can fuel populist, nationalistic, and xenophobic rhetoric.
- In some places, leaders may capitalize on economic and social fears to consolidate power and undermine democratic institutions.
2. **Erosion of Democratic Norms**
   - We've already seen leaders in various countries challenge press freedom, weaken checks and balances, or use technology for mass surveillance.
- If political polarization continues, more segments of the population could become disillusioned with democratic governance, allowing authoritarian or fascist ideologies to gain ground.
3. **Role of Technology in Control**
- Advanced AI-driven surveillance systems can give authoritarian regimes powerful tools to monitor and suppress dissent.
- Disinformation campaigns can polarize societies further, making it easier for extremist ideologies to take hold.
---
## 4. Countervailing Forces & Possible Outcomes
1. **Grassroots Movements & Civil Society**
- Social movements—environmental activists, labor organizers, pro-democracy groups—may push back against corporate greed or authoritarian policies.
- Digital technology can also empower activists (e.g., decentralized organizing, crowdfunding, alternative media).
2. **Global Cooperation vs. Fragmentation**
- Global crises (climate change, pandemics, refugee flows) demand collective problem-solving. Successful collaboration could strengthen international institutions and moderate extremism.
- If cooperation fails or public trust in institutions erodes, we may see more isolationism and the rise of extremist factions.
3. **Economic “Resets” or Shocks**
- Significant economic downturns (like a major recession or financial crisis) could reshape political landscapes. Hardship often fuels both leftist calls for redistribution and right-wing nationalist/fascist sentiments.
- Conversely, robust economic recoveries or new industries (e.g., green tech, biotech) could reduce desperation and undercut extremist appeals—if the benefits are widely shared.
---
## 5. Putting It All Together
- **Capitalism** is adaptable and has historically reinvented itself in response to crises (e.g., welfare states post-WWII, the neoliberal shift in the 1980s). Over the next decade, multiple stressors—inequality, climate challenges, tech disruptions—may force further adaptation or reforms.
- **Greed** will likely persist; it's woven into many current economic systems. However, intensifying calls for accountability (both from governments and the public) might curb the worst excesses—or at least try to.
- **Fascism and other authoritarian trends** can thrive when people feel economically insecure, distrustful of institutions, or marginalized. Whether such movements gain or lose ground will depend partly on whether democracies can effectively address inequality, climate crises, and cultural polarization.
In short, the next decade could see a tug-of-war between **entrenched interests/greed** and **grassroots/structural reforms**, with **authoritarianism** or **fascism** emerging where economic and social tensions go unresolved. But there are also real avenues for positive change—if political will and public engagement push toward more equitable, inclusive, and sustainable systems.
Ultimately, it's not a simple trajectory: these forces will play out differently by region and context. Human agency, collective action, and political choices will determine which path(s) become reality.
## Social media
Forecasting the trajectory of social media platforms over the next decade involves examining multiple trends—technological, economic, regulatory, and cultural. While the specific outcomes will vary by region and platform, here are some notable directions and shifts we're likely to see:
---
## 1. Increasing Regulation & Governance
- **Government intervention**: Expect more stringent policies around content moderation, user privacy, data handling, and platform accountability—especially in the U.S., EU, and other major economies.
- **Platform liability**: Debates about where to draw the line between free speech and harmful content will continue, with platforms facing pressure to remove extremist or misleading content.
- **Antitrust and breakup talks**: Large social media conglomerates (Meta, for instance) may face antitrust scrutiny and be pressured to divest some services or open up their APIs.
---
## 2. Evolving Monetization Models
- **Subscription tiers**: Platforms will experiment with “premium” or ad-free tiers to offset reliance on advertising revenue.
- **Creator economy**: Tools for creators (patronage programs, subscription-based communities, NFT-like digital goods) will expand, letting influencers and content producers monetize directly from fans.
- **Social commerce**: Integration of e-commerce features (live-stream shopping, in-app checkouts) will become more seamless, making social platforms a core shopping destination.
---
## 3. Fragmentation & Niche Communities
- **Platform fatigue**: Over-saturation of large networks and privacy concerns may push users to smaller, niche or invite-only platforms (e.g., specialized Discord servers, Mastodon instances, community-driven apps).
- **Identity-based networks**: Groups around shared interests, lifestyles, or professional goals will gain traction, offering curated experiences instead of “everyone on one big feed.”
- **Decentralized models**: Open protocols (ActivityPub, Matrix, etc.) may drive “federated” social media ecosystems, where users control their own data and moderate local communities.
---
## 4. AI-Driven Personalization & Moderation
- **Algorithmic curation**: Personalization will become more pervasive, with advanced AI suggesting content tailored to users' behaviors, interests, and social circles—sometimes leading to echo-chamber concerns.
- **Automated moderation**: Platforms will lean heavily on AI to detect hateful content, misinformation, or abuse. However, false positives and biased datasets can lead to user pushback or calls for transparency.
- **Synthetic media & deepfakes**: Detecting AI-generated content will become a central challenge. Platforms may roll out watermarking or verification features for authentic content.
---
## 5. Privacy & Data Ownership
- **End-to-end encryption**: Some platforms (or messaging services within them) will emphasize encryption and user data protection, balancing security needs with law-enforcement pressures.
- **User-control movement**: Rising awareness around data privacy may prompt more “data portability” tools, enabling users to export or self-host their information.
- **Zero-party data**: Platforms and advertisers might shift to collecting data that users explicitly volunteer, rather than passive tracking or third-party cookies.
---
## 6. VR/AR & the “Metaverse” Vision
- **Immersive social experiences**: If VR/AR headsets become cheaper and more comfortable, some platforms will push “metaverse” ecosystems for social interaction, gaming, events, and commerce.
- **Hybrid experiences**: For most users, immersive worlds might remain occasional or niche. Expect a blend of traditional feeds with occasional XR “hangouts” or events.
- **Challenges to adoption**: High hardware costs and interface complexity could slow mainstream adoption, while privacy issues in a 3D environment add new regulatory questions.
---
## 7. Globalization vs. Regional Splintering
- **Localized platforms**: In places where governments restrict foreign platforms (e.g., China, or emerging regulatory regimes elsewhere), local competitors will flourish.
- **Cultural divergence**: As content rules and moderation standards differ by region, platforms may increasingly segment services or features to comply with local laws.
- **Cross-border influences**: Despite splintering, “global” trends (music, memes, political movements) will still spread, but possibly through a patchwork of regional networks.
---
### Putting It All Together
Over the next decade, social media will likely:
- Become more **regulated**, with heightened scrutiny around privacy, content moderation, and monopolistic practices.
- Pursue **diversified monetization** strategies (subscriptions, creator tools, social commerce).
- Continue to **fragment**, as users seek more targeted or decentralized communities.
- Lean on **advanced AI** for personalization and moderation—while grappling with new challenges like deepfakes.
- Explore **immersive experiences** (VR/AR) but face barriers to widespread adoption.
Ultimately, social media's direction will hinge on how well platforms balance user autonomy, safety, and profit motives—and how governments and the public respond to evolving digital ecosystems. The "one big social network for everyone" model may fade, replaced by a more complex, multi-layered landscape of communities and niche experiences.

View File

@ -0,0 +1,77 @@
---
title: "Women's Equality Across Continents"
date: 2023-12-21
draft: false
tags: ["women's rights", "gender equality", "global perspective"]
---
# Women's Equality: A Global Perspective
Women's equality remains a critical global issue, with significant variations across different continents. According to the [Global Gender Gap Report 2023](https://www.weforum.org/publications/global-gender-gap-report-2023/), the global gender gap has been closed by 68.4% as of 2023. Let's explore the current state of women's equality in each major continent.
## Europe
Europe has made significant strides in gender equality, with:
- Strong legislative frameworks for gender equality
- High female labor force participation (76.6% in Nordic countries)
- Progressive parental leave policies, with up to 48 weeks in some countries
- Significant female representation in politics (40% average in the EU parliament)
- An average gender pay gap of 12.7% across the EU
- Some countries like Luxembourg and Romania have gaps below 5%
## North America
North America shows mixed progress:
- High educational attainment for women (58% of U.S. college graduates are women)
- Ongoing discussions about wage gaps (women earn 83 cents for every dollar earned by men in the U.S.)
- Increasing corporate leadership roles for women (8.8% of Fortune 500 CEOs)
- In Canada, the gender wage gap is 89 cents to the dollar
- Challenges in paid family leave policies
## Asia
The world's largest continent shows diverse patterns:
- Rapid progress in East Asian economies, with Japan reaching 72% gender parity
- Significant challenges in South Asia, with a 62.3% average gender parity
- In Japan, women earn 23.7% less than men on average
- In South Korea, the pay gap is one of the highest at 31.5%
- Increasing educational opportunities
- Cultural barriers in some regions
## Africa
The African continent shows both progress and persistent challenges:
- Increasing female political representation (Rwanda leads globally with 61.3% women in parliament)
- Growing entrepreneurship among women (26% of female adults engaged in entrepreneurship)
- Wage gaps vary widely, with women earning 20-50% less than men in many countries
- Continued challenges in educational access (33% of girls in Sub-Saharan Africa do not attend school)
- Traditional practices affecting gender equality
## South America
South America demonstrates evolving dynamics:
- Strong female participation in higher education (57% of university students)
- Growing women's movements (Ni Una Menos movement)
- Significant pay disparities (women earn 25% less than men on average)
- In Brazil, the gender pay gap is 29.7%
- Progressive policies in some countries
## Oceania
Oceania presents a unique context:
- Strong legislative protections
- High female workforce participation (72% in Australia)
- Australian women earn on average 13.8% less than men
- New Zealand has a smaller gender pay gap of 9.1%
- Challenges in remote and indigenous communities
- Progressive policies in Australia and New Zealand, with both countries in the global top 10 for gender parity
## Conclusion
While progress toward women's equality varies significantly across continents, global trends show gradual improvement. At the current rate of progress, it will take 131 years to reach full gender parity globally. Continued efforts in policy-making, education, and cultural change are essential for achieving genuine gender equality worldwide.

View File

@ -0,0 +1,11 @@
---
title: Example Reference
description: A reference page in my new Starlight docs site.
---
Reference pages are ideal for outlining how things work in terse and clear terms.
Less concerned with telling a story or addressing a specific use case, they should give a comprehensive outline of what you're documenting.
## Further reading
- Read [about reference](https://diataxis.fr/reference/) in the Diátaxis framework

View File

@ -0,0 +1,5 @@
{
"extends": "astro/tsconfigs/strict",
"include": [".astro/types.d.ts", "**/*"],
"exclude": ["dist"]
}

View File

@ -0,0 +1,358 @@
# **AI Inpainting Tools: A Comparative Overview**
Inpainting, the art of seamlessly restoring or modifying images, has been revolutionized by artificial intelligence (AI). AI inpainting tools offer powerful capabilities for removing unwanted objects, filling in missing parts, or even generating entirely new content within an image, all while maintaining a natural and cohesive look 1. Most of the tools discussed in this article also offer background removal capabilities, allowing you to easily change or erase the background of an image. This article explores some of the leading AI inpainting tools available, providing a comparative overview of their features, pricing, and accessibility.
## **YouCam Perfect**
YouCam Perfect 1 is an AI-powered photo editing app that specializes in object replacement and offers a range of features for enhancing and transforming images.
**Features:**
* **AI Replace:** YouCam Perfect's AI Replace tool allows users to seamlessly swap out objects or elements within an image. This feature can be used to change outfits, add accessories, or replace backgrounds with a few simple taps.
**Pricing:**
YouCam Perfect offers a free version with basic editing features. Users can upgrade to a premium subscription for access to advanced tools and exclusive content.
**Website:** [https://www.perfectcorp.com/consumer/apps/youcam-perfect](https://www.google.com/search?q=https://www.perfectcorp.com/consumer/apps/youcam-perfect)
## **Shakker AI**
Shakker AI 3 is a versatile platform that provides access to a vast library of over 50,000 Stable Diffusion models, making it a one-stop shop for various AI image editing needs. With its user-friendly interface, Shakker AI is suitable for both beginners and seasoned professionals 4.
**Features:**
* **Inpainting and Outpainting:** Shakker AI excels in inpainting, allowing users to seamlessly remove or replace objects within an image. It also offers outpainting, which extends the canvas beyond the original image boundaries 1.
* **Custom Mode Generation:** Users can fine-tune their results by adjusting prompts, selecting specific models, and modifying settings like img2img, adetailer, and samplers. This feature provides greater control over the AI's creative process, allowing for more personalized and precise edits 4.
* **Integrated Tools:** Shakker AI integrates with A1111 WebUI and ComfyUI, providing advanced editing capabilities and streamlined workflows 4.
* **Model Training:** Users can even train their own models to achieve highly personalized results 4.
**Key Insight:** Shakker AI allows users to earn money by uploading and sharing their trained models. Each time someone uses your model, you earn, creating a unique opportunity for monetizing your AI skills 5.
**Pricing:**
Shakker AI offers a free tier with 200 fast tokens daily 3. Paid plans start at $10 per month for 15,000 tokens, with higher tiers offering more tokens and faster image generation speeds 6.
**Website:** [https://www.shakker.ai/](https://www.shakker.ai/) 7
## **Fotor**
Fotor 1 is an all-in-one online photo editor that provides a user-friendly platform for various image editing needs, including AI-powered inpainting.
**Features:**
* **AI Inpainting:** Fotor's AI inpainting tool allows users to remove unwanted objects, replace elements, or even generate entirely new content within an image 8.
* **AI Object Remover:** This feature allows for precise removal of unwanted objects from photos 9.
* **Background Removal:** Users can easily remove or replace backgrounds from images 9.
* **Photo Restoration:** Fotor can restore old or damaged photos, enhancing their quality and color 10.
**Key Insight:** Fotor offers a comprehensive suite of AI tools beyond inpainting, including AI Headshot Generator and AI Face Generator, providing a versatile platform for various creative needs 9.
**Pricing:**
Fotor offers a free plan with basic editing features 11. Paid plans start at $8.99 per month, providing access to advanced AI tools, premium templates, and high-resolution downloads 11.
**Website:** [https://www.fotor.com/](https://www.fotor.com/) 12
## **Deep-image.ai**
Deep-image.ai 13 is an AI-powered image enhancement platform that offers a range of tools for improving image quality 14, including inpainting.
**Features:**
* **Inpainting:** Deep-image.ai's inpainting tool allows users to select specific areas of an image and use text prompts to add or remove elements 13.
* **Upscaling:** The platform can upscale images up to 16 times or 300 megapixels while preserving details and quality. It also supports batch processing of up to 50 images simultaneously, enhancing efficiency for users with multiple files 14.
* **Noise Reduction:** Deep-image.ai can effectively remove noise from photos taken in low-light conditions or with high ISO settings 15.
* **Background Removal:** Users can remove or swap backgrounds from images 14.
**Key Insight:** Deep-image.ai is a cost-effective solution for image enhancement, offering both pay-as-you-go options and volume discounts for users who need flexibility and affordability 16.
**Pricing:**
Deep-image.ai offers both pay-as-you-go and subscription plans 16. Pay-as-you-go starts at $7.99 for 15 credits, while subscriptions start at $9 per month for 100 credits 16.
**Website:** [https://deep-image.ai/](https://deep-image.ai/) 14
## **VanceAI**
VanceAI 17 is an AI-powered image editing toolkit that specializes in image enhancement and photo restoration, with a dedicated inpainting tool.
**Features:**
* **AI Inpainting:** VanceAI's inpainting tool allows users to remove unwanted objects or blemishes from images 17.
* **AI Photo Restorer:** This feature can repair old or damaged photos, removing scratches, tears, and spots 17.
* **AI Image Enhancer:** VanceAI can automatically enhance image quality by adjusting colors, contrast, and sharpness 18.
* **Background Removal:** Users can remove backgrounds from images with AI 18.
**Applications of VanceAI:**
VanceAI's AI-powered tools have applications across various fields, including:
* **E-commerce:** Enhance product images to attract customers and boost sales.
* **Photography:** Improve the quality of photos and restore old or damaged pictures.
* **Marketing:** Create visually appealing marketing materials and social media content.
18
**Key Insight:** VanceAI is committed to providing high-quality results with its AI-powered tools, ensuring that images are enhanced and restored with precision and clarity 19.
**Pricing:**
VanceAI offers a credit-based system, with packages starting at $4.95 for 100 credits 20. Subscription plans are also available, starting at $39.90 per month for the desktop version 22.
**Website:** [https://vanceai.com/](https://vanceai.com/) 19
## **getimg.ai**
getimg.ai 2 is an AI image generation platform that offers a range of tools for creating and editing images, including inpainting.
**Features:**
* **Inpainting:** getimg.ai's inpainting tool allows users to remove objects from photos or make specific edits using text prompts 23.
* **Outpainting:** This feature expands images beyond their original boundaries, and getimg.ai offers an infinite canvas for this purpose, allowing for limitless creative exploration 23.
* **Image-to-Image:** Users can transform images into different styles while preserving their structure 25.
* **DreamBooth:** This feature allows for custom AI model training to generate personalized results 24.
**Key Insight:** getimg.ai boasts real-time image generation capabilities, producing images in 1-2 seconds, allowing for rapid iteration and experimentation 24.
**Pricing:**
getimg.ai offers a free plan with 100 image generation credits per month 26. Paid plans start at $12 per month for 3,000 credits, with higher tiers offering more credits and faster generation speeds 26.
**Website:** [https://getimg.ai/](https://getimg.ai/) 27
## **Pincel**
Pincel 28 is an AI-powered online image editing platform that offers a variety of tools for retouching, generative fill, and creative photo manipulation, including inpainting.
**Features:**
* **Inpainting:** Pincel's inpainting tool allows users to remove unwanted objects or edit parts of an image with a brush and text prompts 29.
* **AI Image Editor:** This feature functions similarly to Photoshop's generative fill, enabling users to make complex edits with AI prompts 30.
* **Object Remover:** Pincel can precisely remove unwanted objects from photos 30.
* **AI Portrait Maker:** This tool enhances portraits using AI, adjusting lighting, skin tone, and facial features 31.
* **AI Image Replicator:** This tool allows users to mimic and duplicate image colors, styles, or even body poses, providing a unique way to transform images while maintaining key elements 31.
**Key Insight:** Pincel is designed for multi-device compatibility and can be accessed on any device with a web browser, offering flexibility and convenience for users who want to edit images on the go 31.
**Pricing:**
Pincel offers a free trial with 20 credits 32. Paid plans start at $19 per month for 1,000 credits, with higher tiers offering more credits and access to advanced features 32.
**Website:** [https://pincel.app/](https://pincel.app/) 31
## **Novita AI**
Novita AI 33 is an AI platform that offers a range of APIs for various AI tasks, including image generation and inpainting.
**Features:**
* **Inpainting:** Novita AI's inpainting API allows developers to integrate AI inpainting capabilities into their applications 33.
* **Text-to-Image and Image-to-Image:** Novita AI provides APIs for generating images from text descriptions and transforming existing images into new styles 34.
* **Stable Diffusion:** The platform utilizes Stable Diffusion for image generation, ensuring high-quality and consistent results 34.
* **Serverless GPUs:** Novita AI offers serverless GPUs, allowing for scalable and cost-effective AI model deployment 35.
**Key Insight:** Novita AI offers serverless GPUs, which provide a significant advantage for developers and businesses looking to deploy and scale AI models without the cost and complexity of managing their own infrastructure 35.
**Specific Models and APIs:** Novita AI provides access to a variety of models and APIs, including Llama 2, Mistral, and Stable Diffusion, offering a comprehensive suite of tools for various AI tasks 36.
**Pricing:**
Novita AI uses a pay-as-you-go pricing model for its APIs 37. The pricing for image generation starts at $0.0015 per standard image 34.
**Website:** [https://novita.ai/](https://novita.ai/) 35
## **Sider.AI**
Sider.AI 38 is an AI-powered browser extension that offers a suite of tools for enhancing productivity and creativity, including an inpainting tool.
**Features:**
* **AI Inpainting:** Sider.AI's inpainting tool allows users to remove and replace objects in images, generate context-aware fills, and enhance images with professional-grade results 38.
* **Background Removal:** Users can remove or change backgrounds from images 38.
* **AI Chatbots:** Sider.AI integrates with various AI chatbots like ChatGPT, GPT-4, Claude, and Bard 39.
* **YouTube Video Summarization:** This feature generates concise summaries of YouTube videos 39.
* **AI Image Translator:** Sider.AI can translate text within images and photos online, making it a valuable tool for global communication and accessibility 40.
**Key Insight:** Sider.AI integrates with multiple AI models in its chatbot, allowing users to compare responses from different AI models and gain diverse perspectives on their queries 39.
**Pricing:**
Sider.AI offers a free plan with limited credits for various features 39. Paid plans start at $8.3 per month, providing more credits and access to advanced AI models 39.
**Website:** [https://sider.ai/](https://sider.ai/) 41
## **Picsart**
Picsart 1 is a popular all-in-one creative platform that offers a wide range of photo and video editing tools, including AI-powered features like inpainting.
**Features:**
* **AI Replace:** Picsart's AI Replace tool allows users to swap out objects or elements within an image using text prompts 42.
* **AI Image Generator:** This feature allows users to generate images from text descriptions 43.
* **AI Enhance:** This tool automatically enhances images, improving their quality and clarity with AI-powered adjustments 43.
* **Background Removal:** Users can remove backgrounds from images 44.
* **Collage Maker:** Picsart offers a collage maker with various layouts and customization options 44.
**Key Insight:** Picsart fosters a strong community aspect with its challenges, remixing features, and collaborative spaces, encouraging users to connect, share, and learn from each other 44.
**Free-to-Edit Images:** Picsart offers an extensive collection of free-to-edit images, providing users with a vast library of high-quality visuals for their creative projects 45.
**Pricing:**
Picsart offers a free plan with basic editing tools and limited access to AI features 46. Paid plans start at $5 per month, providing more credits for AI tools, access to premium content, and advanced features 43.
**Website:** [https://picsart.com/](https://picsart.com/) 46
## **Hotpot.ai**
Hotpot.ai 10 is an AI-powered platform that offers a variety of tools for image generation, photo editing, and content writing, including an inpainting tool.
**Features:**
* **AI Inpainting:** Hotpot.ai's inpainting tool allows users to remove objects from photos or make specific edits 47.
* **AI Art Generator:** This feature allows users to generate art and images from text descriptions in various styles 48.
* **Photo Restoration:** Hotpot.ai can restore old or damaged photos 49.
* **Object Removal:** Users can remove unwanted objects from photos 48.
* **AI Sparkwriter:** This tool can assist with copywriting, brainstorming ideas, and generating content for various purposes, including articles, ads, and creative writing 50.
**Key Insight:** Hotpot.ai boasts an extensive library of AI art and photo styles, offering over 180 options for users to explore and personalize their creations 51.
**Pricing:**
Hotpot.ai offers a credit-based system, with packages starting at $12 for 1,000 credits 47. Subscription plans are also available, starting at $10 per month for 1,000 credits 47.
**Website:** [https://hotpot.ai/](https://hotpot.ai/) 51
## **Phot.ai**
Phot.ai 10 is an AI-powered visual design platform that offers a range of photo editing and creative tools, including inpainting.
**Features:**
* **Object Remover:** Phot.ai's object remover allows users to remove unwanted objects from photos using a brush tool 52.
* **AI Object Replacer:** This tool allows users to swap, remove, or add objects to images, providing greater flexibility for creative edits and object manipulation 52.
* **Background Replacer:** This feature allows users to replace, remove, or blur backgrounds in images 52.
* **AI Image Enhancer:** Phot.ai can enhance image quality, clear blurry pictures, and sharpen details 53.
* **AI Image Extender:** This tool expands images beyond their original borders 54.
**Key Insight:** Phot.ai boasts a user-friendly interface, making its advanced AI-powered tools accessible to users with varying levels of experience 55.
**Pricing:**
Phot.ai offers various subscription plans, starting at $9 per month for the basic plan with limited usage 56. The pro plan costs $19.99 per month and offers more features and usage 56.
**Website:** [https://www.phot.ai/](https://www.phot.ai/) 52
## **PhotoRestore.io**
PhotoRestore.io 10 is a free AI-powered photo restoration service that specializes in repairing and enhancing old or damaged photos.
**Features:**
* **Photo Restoration:** PhotoRestore.io can repair blurry, faded, or damaged photos using AI algorithms 57.
* **Increase Image Quality:** The platform can enhance image quality by removing noise, improving clarity, and increasing sharpness 58.
* **Remove Background:** Users can remove or replace backgrounds from images 58.
* **Colorize Photo:** PhotoRestore.io can add natural-looking color to black and white photos 58.
* **Photo Albums:** Users can create and organize photo albums on the platform, making it easy to manage and share restored memories with family and friends 58.
**Key Insight:** PhotoRestore.io prioritizes user privacy and data security, ensuring that uploaded photos are not stored or shared with third parties 57.
**Pricing:**
PhotoRestore.io is completely free to use 59.
**Website:** [https://www.photorestore.io/](https://www.photorestore.io/) 60
## **Summary Table**
| Name | Link | Price | Features | Supported File Formats |
| :---- | :---- | :---- | :---- | :---- |
| YouCam Perfect | [https://www.perfectcorp.com/consumer/apps/youcam-perfect](https://www.google.com/search?q=https://www.perfectcorp.com/consumer/apps/youcam-perfect) | Free, with premium subscription | AI Replace | JPEG, PNG |
| Shakker AI | [https://www.shakker.ai/](https://www.shakker.ai/) | Free tier, Paid plans from $10/month | Inpainting, Outpainting, Custom Mode Generation, Model Training | Various |
| Fotor | [https://www.fotor.com/](https://www.fotor.com/) | Free plan, Paid plans from $8.99/month | AI Inpainting, AI Object Remover, Background Removal, Photo Restoration | JPG, PNG, WEBP |
| Deep-image.ai | [https://deep-image.ai/](https://deep-image.ai/) | Pay-as-you-go from $7.99, Subscriptions from $9/month | Inpainting, Upscaling, Noise Reduction, Background Removal | JPG, PNG |
| VanceAI | [https://vanceai.com/](https://vanceai.com/) | Credit-based from $4.95, Subscriptions from $39.90/month | AI Inpainting, AI Photo Restorer, AI Image Enhancer, Background Removal | JPG, PNG |
| getimg.ai | [https://getimg.ai/](https://getimg.ai/) | Free plan, Paid plans from $12/month | Inpainting, Outpainting, Image-to-Image, DreamBooth | JPG, PNG, WEBP |
| Pincel | [https://pincel.app/](https://pincel.app/) | Free trial, Paid plans from $19/month | Inpainting, AI Image Editor, Object Remover, AI Portrait Maker, AI Image Replicator | Various |
| Novita AI | [https://novita.ai/](https://novita.ai/) | Pay-as-you-go from $0.0015/image | Inpainting API, Text-to-Image, Image-to-Image, Serverless GPUs | Various |
| Sider.AI | [https://sider.ai/](https://sider.ai/) | Free plan, Paid plans from $8.3/month | AI Inpainting, Background Removal, AI Chatbots, YouTube Video Summarization, AI Image Translator | JPG, PNG, WEBP |
| Picsart | [https://picsart.com/](https://picsart.com/) | Free plan, Paid plans from $5/month | AI Replace, AI Image Generator, AI Enhance, Background Removal, Collage Maker | Various |
| Hotpot.ai | [https://hotpot.ai/](https://hotpot.ai/) | Credit-based from $12, Subscriptions from $10/month | AI Inpainting, AI Art Generator, Photo Restoration, Object Removal, AI Sparkwriter | JPEG, PNG, SVG |
| Phot.ai | [https://www.phot.ai/](https://www.phot.ai/) | Subscriptions from $9/month | Object Remover, AI Object Replacer, Background Replacer, AI Image Enhancer, AI Image Extender | JPEG, PNG, JPG, BMP, WEBP |
| PhotoRestore.io | [https://www.photorestore.io/](https://www.photorestore.io/) | Free | Photo Restoration, Increase Image Quality, Remove Background, Colorize Photo, Photo Albums | PNG, JPG, JPEG |
## **Conclusion**
The field of AI inpainting is rapidly evolving, with new tools and features emerging constantly. The tools discussed in this article represent a diverse range of options, each with its own strengths and weaknesses. When choosing an AI inpainting tool, it's essential to consider your specific needs, budget, and technical expertise. Factors such as ease of use, image quality, pricing, and the availability of advanced features should all play a role in your decision.
Here's a more detailed comparison to help you choose the right tool:
* **For Beginners:** If you're new to AI inpainting, YouCam Perfect, Fotor, or PhotoRestore.io offer user-friendly interfaces and straightforward tools that are easy to learn. PhotoRestore.io is an excellent option for those seeking a completely free solution, while YouCam Perfect and Fotor provide more comprehensive editing features with their premium plans.
* **For Professionals:** Shakker AI, Deep-image.ai, and VanceAI offer advanced features and customization options that cater to professional needs. Shakker AI stands out with its vast model library and model training capabilities, while Deep-image.ai excels in upscaling and batch processing. VanceAI provides a strong focus on high-quality image enhancement and restoration.
* **For Specific Use Cases:** If you need to expand images beyond their original boundaries, getimg.ai's infinite canvas for outpainting is a unique feature. Pincel's AI Image Replicator offers a creative way to transform images while maintaining key elements. For developers seeking to integrate inpainting capabilities into their applications, Novita AI provides a range of APIs and serverless GPUs for scalable deployment. Sider.AI's integration with multiple AI models in its chatbot allows for diverse perspectives and comparisons. Picsart's strong community and collaborative features make it an excellent choice for those who enjoy sharing and learning from others. Hotpot.ai's extensive library of AI art and photo styles provides a wide range of creative options. Phot.ai's user-friendly interface makes it accessible to users with varying levels of experience.
By carefully evaluating these factors, you can select the tool that best empowers you to achieve your creative goals and seamlessly enhance your images.
#### **Works cited**
1\. Top 7 AI Replace and AI Inpainting Tools of 2024 \- Perfect Corp., accessed on February 16, 2025, [https://www.perfectcorp.com/consumer/blog/photo-editing/ai-replace-tools](https://www.perfectcorp.com/consumer/blog/photo-editing/ai-replace-tools)
2\. Stable Diffusion Inpainting Online | getimg.ai, accessed on February 16, 2025, [https://getimg.ai/features/inpainting](https://getimg.ai/features/inpainting)
3\. Shakker AI Reviews, Alternatives, and Pricing updated February 2025, accessed on February 16, 2025, [https://opentools.ai/tools/shakker-ai](https://opentools.ai/tools/shakker-ai)
4\. Master AI Inpainting: Tools, Features, and Benefits in 2024 | Shakker AI, accessed on February 16, 2025, [https://wiki.shakker.ai/en/ai-inpainting](https://wiki.shakker.ai/en/ai-inpainting)
5\. How to Use Shakker AI: The Ultimate Guide to AI Image Generation \- YouTube, accessed on February 16, 2025, [https://www.youtube.com/watch?v=ohvxhqOzlrg](https://www.youtube.com/watch?v=ohvxhqOzlrg)
6\. Shakker \- Membership, accessed on February 16, 2025, [https://www.shakker.ai/purchase](https://www.shakker.ai/purchase)
7\. Creators who have published original models on other AI platforms such as CivitAI, SeaArt and Tensor before June 13,2024. \- Shakker AI, accessed on February 16, 2025, [https://www.shakker.ai/activitys/shake-the-world](https://www.shakker.ai/activitys/shake-the-world)
8\. Inpainting AI \- Free Online Image Recreation With AI \- Fotor, accessed on February 16, 2025, [https://www.fotor.com/features/image-inpainting/](https://www.fotor.com/features/image-inpainting/)
9\. Features Overview \- Get To Know Fotor Better, accessed on February 16, 2025, [https://www.fotor.com/features/](https://www.fotor.com/features/)
10\. My Hands-on Experience Testing AI Photo Restorers \- Shotkit, accessed on February 16, 2025, [https://shotkit.com/ai-photo-restorers/](https://shotkit.com/ai-photo-restorers/)
11\. Fotor Pricing Plan & Cost Guide \- GetApp, accessed on February 16, 2025, [https://www.getapp.com/website-ecommerce-software/a/fotor/pricing/](https://www.getapp.com/website-ecommerce-software/a/fotor/pricing/)
12\. Fotor | Photo Editing Made Simple \- Free Online Photo Editor, accessed on February 16, 2025, [https://www.fotor.com/mobile/android.html](https://www.fotor.com/mobile/android.html)
13\. Inpainting AI \- Select and edit a section of an image with AI Generator, accessed on February 16, 2025, [https://deep-image.ai/app/tools/inpainting](https://deep-image.ai/app/tools/inpainting)
14\. Deep-Image.ai \- AI Image Enhancer to Generate & Upscale, accessed on February 16, 2025, [https://deep-image.ai/](https://deep-image.ai/)
15\. Overview of Deep-Image.ai's Image Enhancement Tools, accessed on February 16, 2025, [https://deep-image.ai/blog/overview-of-deep-image-ais-image-enhancement-tools/](https://deep-image.ai/blog/overview-of-deep-image-ais-image-enhancement-tools/)
16\. Pricing \- AI Image Enhancer from free to enterprise \- Deep-Image.ai, accessed on February 16, 2025, [https://deep-image.ai/app/plans](https://deep-image.ai/app/plans)
17\. AI Old Photo Restoration: Repair & Enhance Old Pictures In Seconds \- VanceAI, accessed on February 16, 2025, [https://vanceai.com/old-photo-restoration/](https://vanceai.com/old-photo-restoration/)
18\. Vance AI: Your Secret Weapon for Effortless Image Enhancement \- Content Beta, accessed on February 16, 2025, [https://www.contentbeta.com/blog/vance-ai/](https://www.contentbeta.com/blog/vance-ai/)
19\. VanceAI | AI Photo Enhancement, Generation & Editing Tools Provider, accessed on February 16, 2025, [https://vanceai.com/](https://vanceai.com/)
20\. Vance AI Pricing \- Photo Editing Software \- SaaSworthy, accessed on February 16, 2025, [https://www.saasworthy.com/product/vance-ai/pricing](https://www.saasworthy.com/product/vance-ai/pricing)
21\. Pricing Plan \- VanceAI, accessed on February 16, 2025, [https://vanceai.com/pricing/](https://vanceai.com/pricing/)
22\. Pricing Plan \- VanceAI, accessed on February 16, 2025, [https://vanceai.com/pricing-for-win/](https://vanceai.com/pricing-for-win/)
23\. Getimg.ai Review: The Best Free AI Image Generator & Editor? \- Unite.AI, accessed on February 16, 2025, [https://www.unite.ai/getimg-ai-review/](https://www.unite.ai/getimg-ai-review/)
24\. Getimg.ai Review: Features, Pros & Cons, Free Trial \- Medium, accessed on February 16, 2025, [https://medium.com/@paowens59/getimg-ai-review-full-features-pros-cons-free-trial-5da453f196c7](https://medium.com/@paowens59/getimg-ai-review-full-features-pros-cons-free-trial-5da453f196c7)
25\. Image to Image AI Generator Online | getimg.ai, accessed on February 16, 2025, [https://getimg.ai/features/image-to-image](https://getimg.ai/features/image-to-image)
26\. Pricing Plans \- Getimg.ai, accessed on February 16, 2025, [https://getimg.ai/pricing](https://getimg.ai/pricing)
27\. Everything you need to create images with AI | getimg.ai, accessed on February 16, 2025, [https://getimg.ai/](https://getimg.ai/)
28\. pincel.app, accessed on February 16, 2025, [https://pincel.app/tools/inpaint\#:\~:text=An%20inpainting%20tool%20is%20a,blemishes%2C%20or%20create%20seamless%20montages.](https://pincel.app/tools/inpaint#:~:text=An%20inpainting%20tool%20is%20a,blemishes%2C%20or%20create%20seamless%20montages.)
29\. Free Online Inpainting Tool \- Pincel, accessed on February 16, 2025, [https://pincel.app/tools/inpaint](https://pincel.app/tools/inpaint)
30\. All Pincel AI Image Tools Explained, accessed on February 16, 2025, [https://blog.pincel.app/ai-tools/](https://blog.pincel.app/ai-tools/)
31\. Pincel \- Smart and Easy Image Editing App, accessed on February 16, 2025, [https://pincel.app/](https://pincel.app/)
32\. Pricing \- Pincel, accessed on February 16, 2025, [https://pincel.app/pricing](https://pincel.app/pricing)
33\. Imagine More with AI Inpainting \- Novita AI, accessed on February 16, 2025, [https://novita.ai/model-api/product/inpainting](https://novita.ai/model-api/product/inpainting)
34\. Getimg vs. novita.ai: Image generation comparison, accessed on February 16, 2025, [https://novita.ai/blogs/getimg-vs-novitaai-image-generation-faceoff.html](https://novita.ai/blogs/getimg-vs-novitaai-image-generation-faceoff.html)
35\. Novita AI Model Libraries & GPU Cloud \- Deploy, Scale & Innovate, accessed on February 16, 2025, [https://novita.ai/](https://novita.ai/)
36\. Novita AI: Open-Source LLMs, and Affordable GPU Instances \- LobeHub, accessed on February 16, 2025, [https://lobehub.com/blog/novita-ai-open-source-llms-gpu-instances](https://lobehub.com/blog/novita-ai-open-source-llms-gpu-instances)
37\. Novita AI LLM Inference API \- Dev Hunt, accessed on February 16, 2025, [https://devhunt.org/tool/novita](https://devhunt.org/tool/novita)
38\. Online Image Inpainting Tool Powered by Stability.AI \- Sider, accessed on February 16, 2025, [https://sider.ai/create/image/inpaint](https://sider.ai/create/image/inpaint)
39\. Sider Features, Pricing, and Alternatives \- AI Tools, accessed on February 16, 2025, [https://aitools.inc/tools/sider](https://aitools.inc/tools/sider)
40\. Translate Text in Image and Photo Online for Free | Sider.AI, accessed on February 16, 2025, [https://sider.ai/translator/image-translator](https://sider.ai/translator/image-translator)
41\. Free AI Translator: Online Accurate ChatGPT Translation Tool | Sider.AI, accessed on February 16, 2025, [https://sider.ai/translator/text-translator](https://sider.ai/translator/text-translator)
42\. Photo Restoration \- Restore Old Photos with AI Online \- Picsart, accessed on February 16, 2025, [https://picsart.com/ai-image-enhancer/photo-restoration/](https://picsart.com/ai-image-enhancer/photo-restoration/)
43\. Picsart Pro \- Access Advanced Creative Tools, accessed on February 16, 2025, [https://picsart.com/pricing](https://picsart.com/pricing)
44\. Online Photo & Video Editor \- Storytelling Starts Here \- Picsart, accessed on February 16, 2025, [https://picsart.com/apps/picsart-photo-studio/](https://picsart.com/apps/picsart-photo-studio/)
45\. Free-to-Edit Images & Photos \- Start Editing Online & Free \- Picsart, accessed on February 16, 2025, [https://picsart.com/images](https://picsart.com/images)
46\. Picsart: Free AI Design Tools, accessed on February 16, 2025, [https://picsart.com/](https://picsart.com/)
47\. Hotpot AI Review \- Your Creative Helper, accessed on February 16, 2025, [https://originality.ai/blog/hotpot-ai-review](https://originality.ai/blog/hotpot-ai-review)
48\. Hotpot.ai: AI image generator, AI headshots, and other AI tools to spark creativity and productivity, accessed on February 16, 2025, [https://hotpot.ai/](https://hotpot.ai/)
49\. Restore photos: remove scratches, sharpen colors, and enhance ..., accessed on February 16, 2025, [https://hotpot.ai/restore-picture](https://hotpot.ai/restore-picture)
50\. AI Tools: AI headshots, AI image generator, and more \- Hotpot.ai, accessed on February 16, 2025, [https://hotpot.ai/tools](https://hotpot.ai/tools)
51\. AI Image Generator \- Hotpot.ai, accessed on February 16, 2025, [https://hotpot.ai/ai-image-generator](https://hotpot.ai/ai-image-generator)
52\. Phot.AI: AI Photo Editing | Visual Content Creation Platform, accessed on February 16, 2025, [https://www.phot.ai/](https://www.phot.ai/)
53\. AI Photo Enhancer: High quality online AI Image enhancer \- Phot.AI, accessed on February 16, 2025, [https://www.phot.ai/ai-photo-enhancer](https://www.phot.ai/ai-photo-enhancer)
54\. Uncrop & AI Expand Images Easily with AI Image Extender Tool \- Phot.AI, accessed on February 16, 2025, [https://www.phot.ai/ai-image-extender](https://www.phot.ai/ai-image-extender)
55\. Phot.AI Reviews & Product Details \- G2, accessed on February 16, 2025, [https://www.g2.com/products/phot-ai/reviews](https://www.g2.com/products/phot-ai/reviews)
56\. Phot.AI Software Reviews, Demo & Pricing \- 2025, accessed on February 16, 2025, [https://www.softwareadvice.com/graphic-design/phot-ai-profile/](https://www.softwareadvice.com/graphic-design/phot-ai-profile/)
57\. PhotoRestore.io-A free service using AI technology to restore photos. \- AIbase, accessed on February 16, 2025, [https://www.aibase.com/tool/29714](https://www.aibase.com/tool/29714)
58\. AI Photo Restoration & Album Management \- Restore, Organize ..., accessed on February 16, 2025, [https://www.photorestore.io/](https://www.photorestore.io/)
59\. RestorePhotos And 10 Other AI Alternatives For Image restoration, accessed on February 16, 2025, [https://theresanaiforthat.com/ai/restorephotos/](https://theresanaiforthat.com/ai/restorephotos/)
60\. Photo Restore \- ToolAI The world's most complete and comprehensive collection of AI artificial intelligence tools, accessed on February 16, 2025, [https://toolai.io/en/ai/photo-restore](https://toolai.io/en/ai/photo-restore)

View File

@ -0,0 +1,111 @@
# @plastichub/kbot
AI-powered command-line tool for code modifications and project management that supports multiple AI models and routers.
## Overview
KBot is a powerful CLI tool that helps developers automate code modifications, handle project management tasks, and integrate with various AI models for intelligent code and content assistance.
## Quick Start
### Installation Steps
KBot requires Node.js to run. It's recommended to use Node.js version 18 or higher.
1. Visit the official [Node.js website](https://nodejs.org/)
2. Download the LTS (Long Term Support) version for your operating system
3. Follow the installation wizard
4. Verify installation by opening a terminal and running:
```bash
node --version
npm --version
```
### API Keys
KBot supports both OpenRouter and OpenAI APIs. You'll need at least one of these set up.
#### OpenRouter API (Recommended)
1. Visit [OpenRouter](https://openrouter.ai/)
2. Sign up for an account
3. Navigate to the API Keys section
4. Create a new API key
#### OpenAI API (Optional)
1. Go to [OpenAI's platform](https://platform.openai.com/)
2. Create an account or sign in
3. Navigate to API keys section
4. Create a new secret key
### Installation using Node NPM package manager
```bash
npm install -g @plastichub/kbot
```
## Configuration
### API Keys Setup
Create configuration at `$HOME/.osr/.config.json` (or export OSR_CONFIG with path to config.json):
```json
{
"openrouter": {
"key": "your-openrouter-key"
},
"openai": {
"key": "your-openai-key"
},
"email": {
"newsletter": {
"host": "host.org",
"port": 465,
"debug": true,
"transactionLog": true,
"auth": {
"user": "foo@bar.com",
"pass": "pass"
}
}
},
"google": {
"cse": "custom search engine id",
"api_key": "google custom search api key"
},
"serpapi": {
"key": "your SerpAPI key (optional, used for web searches(places, google maps))"
},
"deepseek": {
"key": "your DeepSeek API key (optional)"
}
}
```
### Preferences Setup
Optionally, create `.kbot/preferences.md` in your project directory to customize AI interactions:
```markdown
## My Preferences
Gender : male
Location : New York, USA (eg: `send me all saunas next to me`)
Language : English
Occupation : software developer, Typescript
Age : 30+
## Contacts
My email address : example@email.com (eg: `send me latest hacker news`)
My wife's email address ("Anne") : example@email.com (eg: `send email to my wife, with latest local news`)
## Content
When creating content
- always Markdown
- always add links
- when sending emails, always add 'Best regards, [Your Name]'
```

View File

@ -0,0 +1,28 @@
# Working on Larger Directories
Since LLMs (Large Language Models) and providers are limited to very small 'context windows', it's necessary to feed them with smaller chunks instead. This document explains how to process larger directories efficiently.
## Directory Processing Example
Here's an example of how to walk through files and process them:
```bash
osr-cli each --main='kbot \"read ${KEY} and translate to german, save in docs/language code/filename.md\" --include=\"${REL}\" --include=\".kbot/preferences.md\"' --list="./docs/*.md" --cwd=.
```
### Parameter Explanation
- `each`: Command to process multiple files iteratively
- `--main`: The main command (`kbot`) to execute for each file
- `--include=\"${REL}\"` instructs kbot to include the current selected path
- `--include=\".kbot/preferences.md\"` instructs kbot to include additional preferences about the task (eg: translation specifics)
- `--list`: Specifies the file pattern to match
- Supports include patterns (e.g., `"./docs/*.md"`)
- `--cwd`: Sets the current working directory for the command execution. Default is the current directory (`.`)
**Note** requires `@plastichub/osr-cli-commons` to be installed globally:
```bash
npm i -g @plastichub/osr-cli-commons
```

View File

@ -0,0 +1,27 @@
## platform.bria.ai
https://platform.bria.ai/console/api/lifestyle-product-shot-by-text
// Example: replace an image background via Bria's v1 background/replace endpoint.
const data = JSON.stringify({
  fast: true,                        // use the faster, lower-latency pipeline
  bg_prompt: 'beach, lamu, sun set', // text prompt describing the desired background
  refine_prompt: false,
  original_quality: false,
  num_results: 4,                    // number of result variations to generate
  // NOTE(review): the original example embedded a pre-signed S3 URL that
  // contained temporary AWS credentials (AWSAccessKeyId, Signature,
  // x-amz-security-token). Such URLs must never be committed — replaced
  // with a placeholder. Rotate the leaked credentials if still valid.
  image_url: 'https://example.com/path/to/source-image.jpg'
});
const xhr = new XMLHttpRequest();
xhr.addEventListener('readystatechange', function () {
  if (this.readyState === this.DONE) {
    // Raw JSON response body from the Bria API.
    console.log(this.responseText);
  }
});
xhr.open('POST', 'https://engine.prod.bria-api.com/v1/background/replace');
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.setRequestHeader('api_token', '*****************'); // your Bria API token
xhr.send(data);

View File

@ -0,0 +1,21 @@
## Commands
### Prompt
```kbot "create Astro minimal boilerplate, use starlight theme. Install dependencies via NPM tool"```
### Fetch latest models
```kbot fetch```
### Print examples
```kbot examples```
### Print extended help
```kbot help-md```
### Initialize folder
```kbot init```

View File

@ -0,0 +1,58 @@
# Docker Usage
## Quick Start
To quickly get started with kbot using Docker, run:
```bash
docker run -d -p 8080:8080 plastichub/kbot
```
This command:
- Runs the container in detached mode (`-d`)
- Maps port 8080 from the container to port 8080 on your host machine (`-p 8080:8080`)
- Uses the official plastichub/kbot image
## Container Configuration
### Environment Variables
The Docker container can be configured using environment variables:
```bash
docker run -d \
-p 8080:8080 \
-e OSR_CONFIG='{"openrouter":{"key":"your-key"}}' \
plastichub/kbot
```
### Volumes
To persist data or use custom configurations:
```bash
docker run -d \
-p 8080:8080 \
-v $(pwd):/workspace \
plastichub/kbot
```
### Docker Compose
Example docker-compose.yml:
```yaml
version: '3'
services:
kbot:
image: plastichub/kbot
ports:
- "8080:8080"
volumes:
- .:/workspace
```
Run with:
```bash
docker-compose up -d
```

View File

@ -0,0 +1,6 @@
# Start the plastichub/kbot container with the current directory as workspace.
#   -p 8080:8080                 : expose the container's web UI on host port 8080
#   -v "$(pwd -W)":/workspace    : mount the current directory; `pwd -W` prints a
#                                  Windows-style path — assumes Git Bash on
#                                  Windows (TODO confirm for other shells)
#   -v "C:\Users\zx\.osr/..."    : mount the host's .osr config dir (API keys)
#                                  into the container at /root/.osr/
# NOTE(review): the host config path is hard-coded to user "zx"; adjust per user.
echo "Start code-server in $(pwd)"
docker run \
-p 8080:8080 \
-v "$(pwd -W)":/workspace \
-v "C:\\Users\\zx\\.osr/:/root/.osr/" \
plastichub/kbot

View File

@ -0,0 +1,138 @@
# CLI Examples
## Basic Commands
### Modify Project Files
```bash
# Basic project modification
kbot "Add error handling to API endpoints"
# Using stdin for prompt
echo "Add error handling to API endpoints" | kbot
# Pipe file content as prompt
cat prompt.txt | kbot
# Specify files using include patterns
kbot --include "src/**/*.ts" "Update TypeScript types"
kbot "Add unit tests for src/commands/*" --include="./src/commands/*.ts"
```
### Node.js API Projects
```bash
# Add API endpoints
kbot --include "src/routes/*.ts" "Add authentication middleware"
# Update API models
kbot --include "src/models/*.ts" "Add validation"
```
## Advanced Features
### Using Profiles
Profiles allow you to define variables that can be used across your project and templates. These variables can be accessed using `${VARIABLE_NAME}` syntax in your tools and template partials.
```bash
# Use a specific profile file
kbot "Update configuration" --profile=./profiles/profile.json
# Use environment-specific variables
kbot "Configure for Hugo release" --profile=./profiles/profile.json --env=hugo-release
```
Example profile.json structure:
```json
{
"variables" : {
"foo": "bar"
},
"env": {
"hugo-release": {
"variables":{
"GIT_USER": "hugo-deployer",
"DEPLOY_TARGET": "production"
}
}
}
}
```
- Top-level variables are used as defaults
- Environment-specific variables (specified by --env) override defaults
- Variables can be used in tools and template partials using ${VARIABLE_NAME} syntax
### Custom Output Path
```bash
# Save modifications to different directory
kbot --output ./modified "Refactor code"
```
### AI Model Selection
```bash
# Use specific OpenAI model
kbot --router openai --model gpt-4 "Optimize code"
# Use Anthropic Claude
kbot --model anthropic/claude-3-opus "Add documentation"
```
### Tool Control
```bash
# Disable specific tools
kbot --disable git "Update code without git commits"
# Disable multiple tool categories
kbot --disable fs,npm,git "Analyze code only"
```
### File Selection
```bash
# Multiple include patterns
kbot --include "src/**/*.ts" --include "test/**/*.ts" "Update types"
# Exclude patterns
kbot --include "src/**/*.ts" --include "!src/generated/**" "Refactor code"
```
### Environment and Profile
```bash
# Use specific environment
kbot --env production "Add production configs"
# Custom profile path
kbot --profile ./custom-profile.json --env production
```
### Scripting
```bash
# Generate modification script
kbot --dump ./modify-script.sh "Add types"
```
### Input Types
The tool supports different types of input:
```bash
# Text input through stdin
echo "Add error handling" | kbot
# Piping files
cat my-prompt.md | kbot
# Specifying a file
kbot my-prompt.md
```

View File

@ -0,0 +1,2 @@
# Installation Guide for KBot

View File

@ -0,0 +1,49 @@
# Command Line Parameters
This document describes all available command line parameters.
## Core Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `path` | Target directory | `.` | No |
| `prompt` | The prompt. Supports file paths and environment variables | `./prompt.md` | No |
| `output` | Optional output path for modified files (Tool mode only) | - | No |
| `dst` | Optional destination path for the result, will substitute ${MODEL} and ${ROUTER} in the path. | - | No |
| `model` | AI model to use for processing | `anthropic/claude-3.5-sonnet` | No |
| `router` | Router to use: openai or openrouter | `openrouter` | No |
| `mode` | Chat completion mode: "completion" (without tools) or "tools" | `tools` | No |
## Advanced Parameters
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `each` | Process multiple files iteratively | `.` | No |
| `dry` | Dry run - only write out parameters without making API calls | `false` | No |
## File Selection & Tools
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `include` | Glob patterns to match files for processing. Supports multiple patterns, e.g. `--include=src/*.tsx,src/*.ts --include=package.json` | - | No |
| `disable` | Disable tools categories | `[]` | No |
| `disableTools` | List of specific tools to disable | `[]` | No |
## Configuration & Profiles
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `profile` | Path to profile for variables. Supports environment variables | `${POLYMECH-ROOT}/profile.json` | No |
| `env` | Environment (in profile) | `default` | No |
| `config` | Path to JSON configuration file (API keys). Supports environment variables | - | No |
| `preferences` | Path to preferences file (location, email, gender, etc). Supports environment variables | `./.kbot/preferences.md` | No |
## Debugging & Logging
| Parameter | Description | Default | Required |
|-----------|-------------|---------|----------|
| `logLevel` | Logging level for the application (0-4) | `2` | No |
| `logs` | Logging directory | `./.kbot` | No |
| `dump` | Create a script | - | No |

View File

@ -0,0 +1,81 @@
# Personal Preferences Configuration
The `.kbot/preferences.md` file is used to store personal information and preferences that help the AI assistant provide more personalized and contextual responses.
## File Location
The preferences file should be located at:
```
.kbot/preferences.md
```
## File Structure
The preferences file uses Markdown format and is structured into several sections:
### Personal Information
```markdown
## My Preferences
Gender : [gender]
Location : [city, country]
Language : [primary language, preferred language]
Occupation : [job title, specialties]
Age : [age range]
```
### Contact Information
```markdown
## Contacts
My email address : [your email]
[Additional contacts with descriptions]
```
### Content Preferences
```markdown
## Content
When creating content
- [content formatting preferences]
- [communication style preferences]
- [email signature preferences]
- [language preferences for specific contacts]
- [search preferences]
```
## Example Configuration
Here's an example of a complete preferences file:
```markdown
## My Preferences
Gender : male
Location : Tarragona, Spain
Language : German, prefer english
Occupation : software developer, Typescript
Age : 45+
## Contacts
My email address : example@email.com
My wife's email (Anne) : anne@email.com
## Content
When creating content
- always Markdown
- always add links
- when sending emails, always add 'kind regards, [Your Name]'
- when sending emails to [Contact], always in [specific language]
- when searching for news, always add links to videos, search via Google and other news outlets
```
## Updating Preferences
You can update your preferences by directly editing the `.kbot/preferences.md` file. Changes take effect immediately for new interactions with the AI assistant.

View File

@ -0,0 +1,35 @@
## Core
- history/session
- run mode: streaming
- extensions: gui
- logging : system, file logger, notification, popup
- filters: in/out (incl. tools)
- input/output formats: md, csv, xls, docx, pdf
- splitters
- pipes: n8n/nodered/stdio
- docs: custom help command
- router: ollama
- bundle: ESM & Deno
## Models
- task->match
## Prompt
- evaluation -> refine
## Tools
### Web
- external agents: https://github.com/Skyvern-AI/skyvern
- multiple scrapers: puppeteer / cherio / API
- Rapid API composer

View File

@ -0,0 +1,86 @@
# LLM Tools Documentation
## Filesystem Tools (fs)
- `list_files`: List all files in a directory
- Parameters:
- directory: (string, required) Directory path to list files from
- pattern: (string, optional) Glob pattern for filtering files
- `remove_file`: Remove a file at given path
- Parameters:
- path: (string, required) Path of the file to remove
- `rename_file`: Rename or move a file or directory
- Parameters:
- src: (string, required) Source path
- dst: (string, required) Destination path
- `modify_project_files`: Modify existing project files
- Parameters:
- files: (array, required) Array of file objects with path and content
- `create_project_structure`: Create project structure with files and folders
- Parameters:
- files: (array, required) Array of file objects with path and content
- `create_file`: Creates a file, given a path and content
- Parameters:
- file: (object, required) Object containing path and content
- `read_file`: Read a file at given path
- Parameters:
- file: (object, required) Object containing path
## NPM Tools (npm)
- `build_project`: Build project using pnpm build command
- Parameters: None required
- `run_npm`: Run an npm/pnpm command
- Parameters:
- command: (string, required) Command to run (e.g. install, test, etc)
- args: (array, optional) Additional arguments for the command
- `install_dependency`: Install a dependency using npm
- Parameters:
- dependencies: (array, required) Array of dependency names
## Git Tools (git)
- `init_repository`: Initialize a new git repository if not exists
- Parameters: None required
- `commit_files_git`: Commit files using git
- Parameters:
- files: (array, required) Array of file paths to commit
- message: (string, required) Commit message
## Terminal Tools (terminal)
- `execute_command`: Execute a terminal command and capture output
- Parameters:
- command: (string, required) Command to execute
- args: (array, optional) Command arguments
- cwd: (string, optional) Working directory for command execution
- background: (boolean, optional) Run command in background (non-blocking)
- window: (boolean, optional) Open command in new terminal window
- detached: (boolean, optional) Run process detached from parent
## Interactive Tools (interact)
- `ask_question`: Ask user a simple question and get response
- Parameters:
- question: (string, required) Question to ask the user
- default: (string, optional) Default answer
- `choose_option`: Ask user to choose from multiple options
- Parameters:
- message: (string, required) Message to show the user
- choices: (array, required) List of choices
- multiple: (boolean, optional) Allow multiple selections
## User Tools (user)
- `capture_screen`: Capture a screenshot of the entire screen or a specific region
- Parameters: None required

3
packages/kbot/kbot-tests/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
node_modules/
dist/
.env

239
packages/kbot/kbot-tests/package-lock.json generated Normal file
View File

@ -0,0 +1,239 @@
{
"name": "ts-cli-app",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "ts-cli-app",
"version": "1.0.0",
"license": "ISC",
"bin": {
"ts-cli": "dist/index.js"
},
"devDependencies": {
"@types/node": "^20.17.10",
"ts-node": "^10.9.1",
"typescript": "^5.7.2"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "0.3.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
"integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@tsconfig/node10": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node12": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node14": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node16": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.17.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.10.tgz",
"integrity": "sha512-/jrvh5h6NXhEauFFexRin69nA0uHJ5gwk4iDivp/DeoEua3uwCUto6PC86IpRITBOs4+6i2I56K5x5b6WYGXHA==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
}
},
"node_modules/acorn": {
"version": "8.14.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/acorn-walk": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"acorn": "^8.11.0"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/arg": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true,
"license": "MIT"
},
"node_modules/create-require": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/make-error": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true,
"license": "ISC"
},
"node_modules/ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
"@tsconfig/node12": "^1.0.7",
"@tsconfig/node14": "^1.0.0",
"@tsconfig/node16": "^1.0.2",
"acorn": "^8.4.1",
"acorn-walk": "^8.1.1",
"arg": "^4.1.0",
"create-require": "^1.1.0",
"diff": "^4.0.1",
"make-error": "^1.1.1",
"v8-compile-cache-lib": "^3.0.1",
"yn": "3.1.1"
},
"bin": {
"ts-node": "dist/bin.js",
"ts-node-cwd": "dist/bin-cwd.js",
"ts-node-esm": "dist/bin-esm.js",
"ts-node-script": "dist/bin-script.js",
"ts-node-transpile-only": "dist/bin-transpile.js",
"ts-script": "dist/bin-script-deprecated.js"
},
"peerDependencies": {
"@swc/core": ">=1.2.50",
"@swc/wasm": ">=1.2.50",
"@types/node": "*",
"typescript": ">=2.7"
},
"peerDependenciesMeta": {
"@swc/core": {
"optional": true
},
"@swc/wasm": {
"optional": true
}
}
},
"node_modules/typescript": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz",
"integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
"dev": true,
"license": "MIT"
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
"dev": true,
"license": "MIT"
},
"node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
}
}
}

Some files were not shown because too many files have changed in this diff Show More