// (removed duplicated file-viewer metadata lines — "280 lines / 10 KiB / JavaScript" — which are not valid JavaScript)
"use strict";
|
|
// TypeScript-emitted helper: re-exports property `k` of module `m` on object `o`
// under the name `k2` (defaults to `k`). The Object.create branch installs a live
// getter so the binding tracks later mutations of m[k]; the fallback branch does a
// one-time copy for very old runtimes without Object.create.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap as a getter when the source property is absent, writable/configurable,
    // or is a getter on a non-ES module (descriptor alone would not stay live).
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
|
|
// TypeScript-emitted helper: installs `v` as the `default` export on namespace
// object `o`; uses defineProperty when available, plain assignment otherwise.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
|
|
// TypeScript-emitted helper: converts a CommonJS module into an ES-module-style
// namespace object. Already-ESM modules (`__esModule` set) pass through untouched;
// otherwise every own key except "default" is re-bound onto a fresh object and the
// module itself becomes its `default` export.
var __importStar = (this && this.__importStar) || (function () {
    // Lazily selects the best own-key enumerator on first use, then caches it
    // by reassigning the outer `ownKeys` binding.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
|
// TypeScript-emitted helper: re-exports every binding of module `m` (except
// "default" and anything this module already exports) onto `exports`.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.createOpenAIFile = exports.applyFilters = exports.assistant = exports.prompt = exports.toImages = exports.dumpAsScript = exports.patchOptions = void 0;
|
|
const path = __importStar(require("path"));
|
|
const fs = __importStar(require("fs"));
|
|
const openai_1 = require("openai");
|
|
__exportStar(require("./query"), exports);
|
|
__exportStar(require("./system"), exports);
|
|
__exportStar(require("./types"), exports);
|
|
const read_1 = require("@plastichub/fs/read");
|
|
const write_1 = require("@plastichub/fs/write");
|
|
const exists_1 = require("@plastichub/fs/exists");
|
|
const osr_commons_1 = require("@plastichub/osr-commons");
|
|
const primitives_1 = require("@plastichub/core/primitives");
|
|
const index_1 = require("../../index");
|
|
const index_2 = require("../index");
|
|
const options_1 = require("./options");
|
|
/**
 * Prepares an options map for serialization onto a CLI command line.
 * Drops null / undefined / empty-string entries, wraps non-empty strings in
 * double quotes, joins arrays with ',' (also quoted), and passes every other
 * value (numbers, booleans, …) through untouched.
 *
 * @param {Object} obj - raw options map; never mutated
 * @returns {Object} a new object containing only the surviving, quoted entries
 */
const patchOptions = (obj) => {
    const newObj = {};
    for (const key in obj) {
        // FIX: call hasOwnProperty via Object.prototype — a direct
        // `obj.hasOwnProperty(key)` throws on null-prototype objects and can be
        // shadowed by an own "hasOwnProperty" entry.
        if (!Object.prototype.hasOwnProperty.call(obj, key)) {
            continue;
        }
        const value = obj[key];
        if (value === null || value === undefined || value === '') {
            continue;
        }
        // typeof / Array.isArray replace the project isString/isArray helpers —
        // equivalent for primitive strings and real arrays, and stdlib-only.
        if (typeof value === 'string' && value.length > 0) {
            newObj[key] = `"${value}"`;
        }
        else if (Array.isArray(value)) {
            newObj[key] = `"${value.join(',')}"`;
        }
        else {
            newObj[key] = value;
        }
    }
    return newObj;
};
|
|
exports.patchOptions = patchOptions;
|
|
/**
 * Persists the current invocation as a replayable shell one-liner.
 * No-op unless `opts.dump` is set; otherwise the dump path is resolved through
 * osr-commons variable substitution and an `osr-ai chatgpt prompt …` command
 * reflecting the relevant options is written there.
 *
 * @param {Object} opts - full prompt options; only a fixed subset is serialized
 */
const dumpAsScript = async (opts) => {
    if (!opts.dump) {
        return;
    }
    // Variables available for placeholder substitution in the dump path.
    const variables = {
        cwd: process.cwd(),
        ...opts.variables,
        ...(0, options_1.pathVariables)(opts)
    };
    const scriptPath = path.resolve((0, osr_commons_1.resolve)(opts.dump, opts.alt, variables));
    // Subset of options worth replaying; patchOptions drops empties and quotes values.
    const selected = (0, exports.patchOptions)({
        query: opts.query,
        alt: opts.alt,
        debug: opts.debug,
        append: opts.append,
        showPrompt: opts.showPrompt,
        model: opts.model,
        logLevel: opts.logLevel,
        cache: opts.cache,
        gui: opts.gui,
        assistant: opts.assistant,
        system: opts.system,
        source: opts.source,
        stdout: opts.stdOut,
        files: opts.files,
        dst: opts.dst,
        dump: opts.dump
    });
    const flags = Object.entries(selected)
        .map(([name, value]) => `--${name}=${value}`)
        .join(' ');
    (0, write_1.sync)(scriptPath, `osr-ai chatgpt prompt ${flags}`);
};
exports.dumpAsScript = dumpAsScript;
|
|
/**
 * Maps local image paths to OpenAI chat `image_url` content parts.
 * Paths that do not exist on disk are silently skipped; each surviving file is
 * inlined via fileToBase64. (`opts` is currently unused — kept for interface
 * stability.)
 *
 * @param {string[]} files - candidate image paths
 * @param {Object} [opts]  - unused
 * @returns {Array} content-part objects suitable for a chat message
 */
const toImages = (files, opts) => {
    const parts = [];
    for (const file of files) {
        if (!(0, exists_1.sync)(file)) {
            continue;
        }
        parts.push({
            type: "image_url",
            image_url: { url: (0, index_2.fileToBase64)(file) }
        });
    }
    return parts;
};
exports.toImages = toImages;
|
|
/**
 * Runs a single chat-completion round trip against OpenAI.
 * The query may be a literal string or a path to a file containing the query;
 * an optional `source` file is appended to the query text. The (filtered)
 * response is optionally written to `opts.dst` and/or stdout, and is returned.
 *
 * @param {Object} opts - CLI/prompt options (query, model, files, dst, filters, …)
 * @returns {Promise<string|null|undefined>} filtered response; undefined when
 *          OpenAI returns no choices; '' when the request errored (filters
 *          normalize the null result)
 */
const prompt = async (opts) => {
    // Resolve the query: if it names an existing file, use the file body for the
    // `// <query>` header written with showPrompt.
    let q = opts.query;
    const queryPath = path.resolve((0, osr_commons_1.resolve)(opts.query));
    if ((0, exists_1.sync)(queryPath)) {
        q = (0, read_1.sync)(queryPath);
    }
    opts = (0, options_1.parse)(opts);
    // Append the source file's content to the query when provided.
    const src = opts.source && (0, exists_1.sync)(opts.source) ? (0, read_1.sync)(opts.source, 'string') : '';
    opts.query = opts.source ? `${opts.query} : ${src}` : opts.query;
    opts = await (0, options_1.fromUI)(opts);
    const client = new openai_1.OpenAI({ apiKey: opts.api_key });
    const options = await (0, options_1.complete)(opts);
    const messages = options.messages || [];
    const requestMessage = {
        role: "user",
        content: [
            {
                type: 'text',
                text: options.query
            }
        ]
    };
    // Attach any existing image files as base64 image_url parts.
    if (options.files && options.filesInfo && options.filesInfo.FILES) {
        requestMessage.content.push(...(0, exports.toImages)(options.filesInfo.FILES, options));
    }
    messages.push(requestMessage);
    let ret = null;
    try {
        const completion = await client.chat.completions.create({
            model: options.model || "gpt-4o",
            messages: messages
        });
        if (completion.choices.length === 0) {
            index_1.logger.error('OpenAI response is empty');
            return;
        }
        ret = completion.choices[0].message.content;
    }
    catch (error) {
        // Best effort: log and fall through with ret = null (filters yield '').
        index_1.logger.error('Error querying OpenAI:', error.message);
    }
    index_1.logger.debug(`OpenAI response:${ret}`);
    ret = (0, exports.applyFilters)(ret, opts.filters || []);
    if (opts.dst) {
        const header = `${opts.showPrompt ? `// ${q}` : ''}\n`;
        const content = `${opts.append ? (0, read_1.sync)(opts.dst) || '' : ''}\n${header}${ret}`;
        // FIX: was `opts.source || opts.files ? opts.files[0] : null`, which parses
        // as `(opts.source || opts.files) ? opts.files[0] : null` and crashed when
        // only `source` was set. Prefer the first input file, else fall back to source.
        const inputFile = opts.files && opts.files.length > 0 ? opts.files[0] : (opts.source || null);
        let dst = opts.dst;
        if (inputFile) {
            dst = (0, options_1.targets)(inputFile, opts)[0];
        }
        index_1.logger.debug(`Writing to ${dst}`);
        (0, write_1.sync)(dst, content);
    }
    // FIX: parenthesized — previously `stdOut && isString(ret) || Buffer.isBuffer(ret)`
    // wrote Buffers to stdout even when stdOut was not requested.
    if (opts.stdOut && ((0, primitives_1.isString)(ret) || Buffer.isBuffer(ret))) {
        process.stdout.write(ret);
    }
    return ret;
};
exports.prompt = prompt;
|
|
/**
 * Runs a query through the OpenAI Assistants (beta) API with file_search:
 * creates a one-off assistant and thread, streams the run, and resolves with
 * the first text message produced. The filtered response is optionally written
 * to `opts.dst` and/or stdout, the invocation is dumped as a script, and the
 * filtered response is returned.
 *
 * @param {Object} opts - CLI/prompt options (query, model, files, dst, filters, …)
 * @returns {Promise<string>} the filtered assistant response text
 */
const assistant = async (opts) => {
    // Resolve the query: if it names an existing file, use the file body for the
    // `// <query>` header written with showPrompt.
    let q = opts.query;
    const queryPath = path.resolve((0, osr_commons_1.resolve)(opts.query));
    if ((0, exists_1.sync)(queryPath)) {
        q = (0, read_1.sync)(queryPath);
    }
    opts = (0, options_1.parse)(opts);
    opts = await (0, options_1.fromUI)(opts);
    const client = new openai_1.OpenAI({ apiKey: opts.api_key });
    const options = await (0, options_1.completeAssistant)(opts, client);
    const assistant = await client.beta.assistants.create({
        name: "Documents Assistant",
        instructions: "You are an expert data analyst.",
        model: opts.model,
        tools: [{ type: "file_search" }],
    });
    const thread = await client.beta.threads.create({
        messages: [
            {
                role: "user",
                content: options.query,
                attachments: options.attachments
            }
        ]
    });
    let ret = await new Promise((resolve, reject) => {
        try {
            client.beta.threads.runs
                .stream(thread.id, {
                assistant_id: assistant.id,
            })
                .on("textCreated", () => console.log("assistant >"))
                .on("toolCallCreated", (event) => console.log("assistant " + event.type))
                // FIX: surface stream failures instead of leaving the promise
                // pending forever (the SDK stream helper is an event emitter).
                .on("error", (error) => reject(error))
                .on("messageDone", async (event) => {
                if (event.content[0].type === "text") {
                    const { text } = event.content[0];
                    index_1.logger.debug('OpenAI response:', text.value);
                    resolve(text.value);
                }
            });
        }
        catch (error) {
            reject(error);
        }
    });
    // FIX: the filtered result was previously computed and discarded; keep it,
    // consistent with `prompt`.
    ret = (0, exports.applyFilters)(ret, opts.filters || []);
    let first = null;
    if (opts.dst) {
        const header = `${opts.showPrompt ? `// ${q}` : ''}\n`;
        const content = `${opts.append ? (0, read_1.sync)(opts.dst) || '' : ''}\n${header}${ret}`;
        // FIX: was `opts.source || opts.files ? opts.files[0] : null`, which parses
        // as `(opts.source || opts.files) ? opts.files[0] : null` and crashed when
        // only `source` was set. Prefer the first input file, else fall back to source.
        const inputFile = opts.files && opts.files.length > 0 ? opts.files[0] : (opts.source || null);
        let dst = opts.dst;
        if (inputFile) {
            dst = (0, options_1.targets)(inputFile, opts)[0];
            first = dst;
        }
        (0, write_1.sync)(dst, content);
    }
    // FIX: parenthesized — previously `stdOut && isString(ret) || Buffer.isBuffer(ret)`
    // wrote Buffers to stdout even when stdOut was not requested.
    if (opts.stdOut && ((0, primitives_1.isString)(ret) || Buffer.isBuffer(ret))) {
        process.stdout.write(ret);
    }
    // FIX: dumpAsScript is async; await it so write errors are observed here.
    await (0, exports.dumpAsScript)(opts);
    return ret;
};
exports.assistant = assistant;
|
|
/**
 * Pipes `value` through each filter in order. A filter's return value replaces
 * the working value only when truthy; falsy results are logged and skipped so a
 * misbehaving filter cannot wipe the response. Filter exceptions are logged and
 * ignored. An empty initial value short-circuits to ''.
 *
 * @param {*} value - value to transform (typically the model response text)
 * @param {Function[]} filters - transformations applied left to right
 * @returns {*} the transformed value, or '' when `value` was empty
 */
const applyFilters = (value, filters) => {
    if (!value) {
        index_1.logger.warn(`applyFilters: value is empty`);
        return '';
    }
    for (const filter of filters) {
        try {
            const filtered = filter(value);
            if (filtered) {
                value = filtered;
            }
            else {
                index_1.logger.warn(`applyFilters: filter returned null : ${value}`, filter.toString());
            }
        }
        catch (error) {
            index_1.logger.error(`Error applying filter: ${error.message}`);
        }
    }
    return value;
};
|
|
exports.applyFilters = applyFilters;
|
|
/**
 * Uploads a local file to OpenAI's Files API.
 *
 * @param {Object} client - OpenAI SDK client instance
 * @param {string} filePath - path of the file to upload
 * @param {string} [purpose='assistants'] - OpenAI file purpose
 * @returns {Promise<Object>} the created file object from the SDK
 */
const createOpenAIFile = async (client, filePath, purpose = 'assistants') => {
    const file = fs.createReadStream(filePath);
    return client.files.create({ file, purpose });
};
exports.createOpenAIFile = createOpenAIFile;
|
|
//# sourceMappingURL=index.js.map
|