fs:write buffer | xlsx experiments - sub tables

This commit is contained in:
lovebird 2025-04-24 09:45:16 +02:00
parent fc4986a54c
commit 934f0e4fe6
102 changed files with 773 additions and 2812455 deletions

View File

@ -1,7 +1,25 @@
import * as mupdf from 'mupdf';
import { Logger } from 'tslog';
import { dirname } from 'node:path';
import { resolveVariables } from '@polymech/commons';
import { sync as write } from '@polymech/fs/write';
import { sync as mkdir } from '@polymech/fs/dir';
import { writeFileSync } from 'node:fs';
import { Buffer } from 'node:buffer';
// Helper function to convert object-like image data to Buffer
// Converts object-like image data (e.g. a serialized Uint8Array such as
// { "0": 137, "1": 80, ... }) into a Buffer.
// The result is sized to the highest numeric index + 1. Missing indices are
// zero-filled: the original used Buffer.allocUnsafe, which leaks previous
// heap contents into the output for any index absent from the input object.
function imageDataObjectToBuffer(imageDataObject) {
    // Fast path: if the data is already a typed-array view, wrap it directly.
    if (ArrayBuffer.isView(imageDataObject)) {
        return Buffer.from(imageDataObject.buffer, imageDataObject.byteOffset, imageDataObject.byteLength);
    }
    // Find the highest numeric key in a single pass (no sort needed).
    let maxIndex = -1;
    for (const key of Object.keys(imageDataObject)) {
        const index = Number(key);
        if (Number.isInteger(index) && index > maxIndex) {
            maxIndex = index;
        }
    }
    // Buffer.alloc zero-fills, so sparse inputs cannot expose stale memory.
    const buffer = Buffer.alloc(maxIndex + 1);
    for (const [key, value] of Object.entries(imageDataObject)) {
        const index = Number.parseInt(key, 10);
        if (!Number.isNaN(index) && index >= 0 && index < buffer.length) {
            buffer[index] = value;
        }
    }
    return buffer;
}
export async function convertPdfToImages(pdfData, options) {
const logger = options.logger || new Logger();
const outputFiles = [];
@ -37,7 +55,8 @@ export async function convertPdfToImages(pdfData, options) {
const imageData = options.format === 'png'
? pixmap.asPNG()
: pixmap.asJPEG(100, false);
write(outputPath, imageData);
mkdir(dirname(outputPath));
writeFileSync(outputPath, imageDataObjectToBuffer(imageData));
outputFiles.push(outputPath);
logger.info(`Converted page ${pageNumber} to ${outputPath}`);
}

View File

@ -12,11 +12,13 @@
"@polymech/commons": "file:../../../commons",
"@polymech/fs": "file:../../../fs",
"@types/yargs": "^17.0.33",
"init": "^0.1.2",
"mupdf": "^1.3.3",
"p-map": "^7.0.3",
"tslog": "^4.9.3",
"typescript": "^5.8.2",
"vitest": "^3.1.1",
"xlsx": "^0.18.5",
"yargs": "^17.7.2",
"zod": "^3.24.3"
},
@ -896,6 +898,15 @@
"url": "https://opencollective.com/vitest"
}
},
"node_modules/adler-32": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz",
"integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@ -938,6 +949,19 @@
"node": ">=8"
}
},
"node_modules/cfb": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.2.tgz",
"integrity": "sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"crc-32": "~1.2.0"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/chai": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz",
@ -977,6 +1001,15 @@
"node": ">=12"
}
},
"node_modules/codepage": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz",
"integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@ -995,6 +1028,26 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"license": "MIT"
},
"node_modules/crc-32": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz",
"integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
"license": "Apache-2.0",
"bin": {
"crc32": "bin/crc32.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/daemon": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/daemon/-/daemon-1.1.0.tgz",
"integrity": "sha512-1vX9YVcP21gt12nSD3SQRC/uPU7fyA6M8qyClTBIFuiRWoylFn57PwXhjBAqRl085bZAje7sILhZU48qcS9SWw==",
"engines": {
"node": ">= 0.8.0"
}
},
"node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
@ -1114,6 +1167,15 @@
}
}
},
"node_modules/frac": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz",
"integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
@ -1137,6 +1199,17 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/init": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/init/-/init-0.1.2.tgz",
"integrity": "sha512-IvHUjULS2q+BXJdiu4FHkByh3+qSFmkOXQ2ItSfYTtkdUksQc0yNX6f1uDyokzRV71tjpFsFc3ckeYLJXunTGw==",
"dependencies": {
"daemon": ">=0.3.0"
},
"engines": {
"node": ">=0.4.7"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
@ -1327,6 +1400,18 @@
"node": ">=0.10.0"
}
},
"node_modules/ssf": {
"version": "0.11.2",
"resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz",
"integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==",
"license": "Apache-2.0",
"dependencies": {
"frac": "~1.1.2"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/stackback": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
@ -1633,6 +1718,24 @@
"node": ">=8"
}
},
"node_modules/wmf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz",
"integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/word": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz",
"integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.8"
}
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@ -1650,6 +1753,27 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/xlsx": {
"version": "0.18.5",
"resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz",
"integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==",
"license": "Apache-2.0",
"dependencies": {
"adler-32": "~1.3.0",
"cfb": "~1.2.1",
"codepage": "~1.15.0",
"crc-32": "~1.2.1",
"ssf": "~0.11.2",
"wmf": "~1.0.1",
"word": "~0.3.0"
},
"bin": {
"xlsx": "bin/xlsx.njs"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",

View File

@ -27,11 +27,13 @@
"@polymech/commons": "file:../../../commons",
"@polymech/fs": "file:../../../fs",
"@types/yargs": "^17.0.33",
"init": "^0.1.2",
"mupdf": "^1.3.3",
"p-map": "^7.0.3",
"tslog": "^4.9.3",
"typescript": "^5.8.2",
"vitest": "^3.1.1",
"xlsx": "^0.18.5",
"yargs": "^17.7.2",
"zod": "^3.24.3"
},

View File

@ -1,142 +0,0 @@
import * as path from 'path'
import { sync as exists } from "@polymech/fs/exists"
import { sync as read } from "@polymech/fs/read"
import { resolve, isFile } from '@polymech/commons'
import { substitute } from '@polymech/commons'
import { IResizeOptions } from '../types'
export const fileAsBuffer = (path: string) => read(path, 'buffer') as Buffer || Buffer.from("-")
// Shallow-clones a plain object or array; primitives and null pass through
// unchanged. Only own enumerable properties are copied; nested values are
// shared with the source (same semantics as the original hand-rolled copy).
// Fix: the original called `obj.constructor()`, which throws on
// null-prototype objects and misbehaves for constructors that require `new`
// (e.g. `Date()` without `new` returns a string).
const clone = (obj) => {
    if (obj === null || typeof obj !== "object") return obj;
    return Array.isArray(obj) ? [...obj] : { ...obj };
}
// Computes the list of absolute output paths for source file `f` from the
// destination template in options. Two modes:
//  - glob destination: one target per GLOB_EXTENSIONS entry, with template
//    variable substitution and extension rewriting;
//  - plain destination: a single target derived from DST_PATH (or the source
//    location when DST_PATH is absent).
export const targets = (f: string, options: IResizeOptions) => {
const srcParts = path.parse(f)
// Cloned so per-file SRC_* assignments don't leak into options.variables.
const variables = clone(options.variables || {})
const targets: string[] = []
// Source path relative to the scanned root; reused to mirror the tree layout.
const rel = path.relative(options.srcInfo.DIR, srcParts.dir)
if (options.dstInfo.IS_GLOB) {
options.dstInfo.GLOB_EXTENSIONS.forEach((e) => {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
let targetPath: string = substitute(options.alt, variables.DST_PATH, variables)
// Strip the glob token and any explicit template extension before rebuilding.
targetPath = targetPath.replace(variables.DST_GLOB, '')
if(variables.DST_FILE_EXT){
targetPath = targetPath.replace('.' + variables.DST_FILE_EXT, '')
}
const parts = path.parse(targetPath)
//back compat
if (variables.DST_NAME === '*') {
variables.DST_NAME = ''
}
// No extension yet: decide whether the template names the file itself
// ({SRC_NAME} present) or only a directory to mirror into.
if (!parts.ext) {
if (variables.DST_PATH.indexOf(`{SRC_NAME}`) === -1) {
targetPath = path.join(targetPath, rel, srcParts.name + variables.DST_NAME.replace(variables.DST_GLOB, '') + '.' + e)
} else {
targetPath = targetPath + variables.DST_NAME.replace(variables.DST_GLOB, '') + '.' + e
}
}
//src.base contains dots
if (!targetPath.endsWith('.' + e)) {
targetPath += '.' + e
}
targets.push(path.resolve(targetPath))
})
} else {
let targetPath = ''
if (!variables.DST_PATH) {
// No destination template: write next to the source file.
targetPath = path.join(srcParts.dir, srcParts.base)
} else {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
targetPath = substitute(options.alt, variables.DST_PATH, variables)
if (isFile(f) && exists(f)) {
// targetPath = path.join(targetPath, srcParts.base)
} else {
// Source is a directory (or missing): treat the template as a folder.
targetPath = path.join(targetPath, srcParts.base)
}
const targetParts = path.parse(targetPath)
if (!targetParts.ext) {
targetPath = path.join(targetPath, srcParts.base)
}
}
targets.push(path.resolve(resolve(targetPath, options.alt, variables)));
}
return targets;
}
// Computes the list of absolute output paths for source file `f` from the
// destination template in options (newer variant of `targets`, same logic).
//  - glob destination: one target per GLOB_EXTENSIONS entry;
//  - plain destination: a single target derived from DST_PATH (or the source
//    location when DST_PATH is absent).
// Fix: removed a duplicated `variables.SRC_DIR = srcParts.dir` statement in
// the non-glob branch; no behavioral change otherwise.
export const targetsNext = (f: string, options: IResizeOptions) => {
const srcParts = path.parse(f)
// Cloned so per-file SRC_* assignments don't leak into options.variables.
const variables = clone(options.variables || {})
const targets: string[] = []
const rel = path.relative(options.srcInfo.DIR, srcParts.dir)
if (options.dstInfo.IS_GLOB) {
options.dstInfo.GLOB_EXTENSIONS.forEach((e) => {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
let targetPath: string = substitute(options.alt, variables.DST_PATH, variables)
// Strip the glob token and any explicit template extension before rebuilding.
targetPath = targetPath.replace(variables.DST_GLOB, '')
if(variables.DST_FILE_EXT){
targetPath = targetPath.replace('.' + variables.DST_FILE_EXT, '')
}
const parts = path.parse(targetPath)
//back compat
if (variables.DST_NAME === '*') {
variables.DST_NAME = ''
}
if (!parts.ext) {
if (variables.DST_PATH.indexOf(`{SRC_NAME}`) === -1) {
targetPath = path.join(targetPath, rel, srcParts.name + variables.DST_NAME.replace(variables.DST_GLOB, '') + '.' + e)
} else {
targetPath = targetPath + variables.DST_NAME.replace(variables.DST_GLOB, '') + '.' + e
}
}
//src.base contains dots
if (!targetPath.endsWith('.' + e)) {
targetPath += '.' + e
}
targets.push(path.resolve(targetPath))
})
} else {
let targetPath = ''
if (!variables.DST_PATH) {
// No destination template: write next to the source file.
targetPath = path.join(srcParts.dir, srcParts.base)
} else {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
targetPath = substitute(options.alt, variables.DST_PATH, variables)
if (isFile(f) && exists(f)) {
// targetPath = path.join(targetPath, srcParts.base)
} else {
targetPath = path.join(targetPath, srcParts.base)
}
const targetParts = path.parse(targetPath)
if (!targetParts.ext) {
targetPath = path.join(targetPath, srcParts.base)
}
}
targets.push(path.resolve(resolve(targetPath, options.alt, variables)));
}
return targets
}

View File

@ -1,167 +0,0 @@
import * as path from 'path'
import * as pMap from 'p-map'
import * as sharp from 'sharp'
import { sync as exists } from "@polymech/fs/exists"
import { async as move } from "@polymech/fs/move"
import { sync as dir } from "@polymech/fs/dir"
import { createItem as toNode } from "@polymech/fs/inspect"
import {
logger,
ERR_PERM_RETRY_DELAY,
ERR_PERM_RETRY_MAX,
IOptions,
IResizeOptions
} from '../../../index'
import {
meta
} from './lib'
import {
targets,
targetsNext
} from '../..'
// Resizes `source` into `target` with sharp. Returns the sharp instance on
// success (or when a min-size/min-dimension threshold short-circuits the
// resize), undefined on failure. Handles in-place resizes by writing to a
// *_tmp file and moving it back over the original with EPERM retries.
export const resizeFile = async (source: string, target: string, onNode: (data: sharp.Sharp) => void = () => { }, options: IResizeOptions): Promise<sharp.Sharp | undefined> => {
//const osr_cache = OSR_CACHE()
//const ca_options = JSON.parse(JSON.stringify({ ...options, target, skip: null }))
//const cached = await get_cached(file, ca_options, MODULE_NAME)
// Remember the caller's destination; `target` may be redirected to a temp
// file below when source and target resolve to the same path.
const targetOri = '' + target
let inPlace = false
if (path.resolve(source) === path.resolve(target)) {
const parts = path.parse(target)
target = path.join(parts.dir, parts.name + '_tmp' + parts.ext)
inPlace = true
}
let image: sharp.Sharp
try {
image = sharp(source)
} catch (e) {
logger.error(`Error reading file, ${source}`, e)
return
}
onNode(image)
let metaData: any = await meta(source, image) || {}
const percent = options.percent
const dstParts = path.parse(target)
const node = toNode(source, {
size: true,
mime: true
})
// Ensure the destination directory exists before sharp writes to it.
if (!exists(dstParts.dir)) {
dir(dstParts.dir)
}
if (options.width && options.minWidth && options.width <= options.minWidth) {
logger.error(`Error resizing : options.width <= options.minWidth`)
return
}
// Threshold guards: skip the resize (returning the untouched image) when the
// source is already at or below the configured minimum dimensions/size.
if (metaData.width && options.width && options.minWidth) {
if (metaData.width <= options.minWidth) {
return image
}
}
if (metaData.height && options.height && options.minHeight) {
if (metaData.height <= options.minHeight) {
return image
}
}
if (options.minSize && node.size && options.minSize >= node.size) {
return image
}
const resizeOptions = {
height: options.height,
fastShrinkOnLoad: options.fastShrinkOnLoad,
withoutEnlargement: options.withoutEnlargement,
withoutReduction: options.withoutReduction,
fit: options.fit,
position: options.position,
background: options.background || 'white'
}
// Percent-based resize takes precedence over explicit width/height.
if (percent && metaData.width) {
image = image.resize({
width: Math.round(metaData.width * (percent / 100)),
...resizeOptions
})
} else if (options.width || options.height) {
image = image.resize({
width: options.width,
...resizeOptions
})
} else {
logger.error(`Error resizing, invalid options for ${source} - no width, height or percent`)
return image
}
// Auto-orient via EXIF for formats that would otherwise lose orientation.
if(dstParts.ext.toLowerCase() === '.webp' ||
dstParts.ext.toLowerCase() === '.png') {
image = image.rotate()
}
if (metaData.width) {
await image.withMetadata().toFile(target)
} else {
try {
await image.toFile(target)
} catch (e) {
logger.error(`Error writing file out, ${source}`, e)
return
}
}
if (inPlace) {
// Linearly increasing back-off between EPERM retries.
const timeout = async (retry) => new Promise((resolve) =>
setTimeout(resolve, ERR_PERM_RETRY_DELAY * retry)
)
// NOTE(review): moveRetry ignores its src/dst parameters and always moves
// `target` -> `targetOri` from the closure, and the recursive retry call is
// not awaited, so the outer `await moveRetry(...)` can resolve before the
// retries finish — confirm whether this is intentional best-effort behavior.
const moveRetry = async (src, dst, retry = 0) => {
if (retry > ERR_PERM_RETRY_MAX) {
logger.error(`Error moving file failed, max retries reached ${src}`)
return
}
try {
await move(target, targetOri)
} catch (e) {
if (e.code === 'EPERM') {
logger.warn(`Error moving file out, retry ${source}`, e)
await timeout(retry)
moveRetry(src, dst, retry + 1)
}
}
}
await moveRetry(source, targetOri)
}
logger.debug(`Resized Image ${source} to ${targetOri}`)
return image
}
// Resizes `file` into each path in `targets`, strictly one at a time.
// Dry runs only log the planned conversion; returns pMap's result array.
export const _resize = async (file, targets: string[], onNode: (data: any) => void = () => { }, options: IOptions) => {
    const resizeOne = async (target) => {
        logger.debug(`Resizing ${file} to ${target}`)
        if (options.dry) {
            return Promise.resolve()
        }
        return resizeFile(file, target, onNode, options);
    }
    return pMap(targets, resizeOne, { concurrency: 1 })
}
// Entry point: resizes every file in options.srcInfo.FILES to its computed
// targets (via targetsNext), sequentially. Returns the per-file results, or
// the (empty) reports list when no source info was supplied.
export const resize = async (options: IResizeOptions) => {
    const reports: any = []
    logger.setSettings({ minLevel: options.logLevel || 'info' as any })
    const onNode = (data: any) => reports.push(data)
    if (!options.srcInfo) {
        logger.error(`Invalid source info`)
        return reports
    }
    logger.debug(`Convert ${options.srcInfo.FILES.length} files`)
    const convertOne = async (f) => {
        const outputs = targetsNext(f, options)
        logger.debug(`Convert ${f} to `, outputs)
        return _resize(f, outputs, onNode, options)
    }
    return await pMap(options.srcInfo.FILES, convertOne, { concurrency: 1 })
}

View File

@ -0,0 +1,353 @@
import * as path from 'path'
import pMap from 'p-map'
import pkg from 'which';
const { sync: which } = pkg;
import { resolve, OSR_CACHE } from '@polymech/commons'
import { dirname,equalFiles, swProcMessage } from './sw-util.js'
import { reportCSV } from '../report/csv.js'
import { logger, substitute } from '../index.js'
import { removeEmpty } from '../lib/index.js'
import { SolidworkOptions } from '../types.js'
import { Helper } from '../lib/process/index.js'
import { sync as exists } from "@polymech/fs/exists"
import { sync as read } from "@polymech/fs/read"
import { sync as write } from "@polymech/fs/write"
import { sync as dir } from "@polymech/fs/dir"
import { sync as rm } from "@polymech/fs/remove"
import { deepClone as clone } from "@polymech/core/objects"
import { swRayTraceRenderQuality_e, IAssembly, IAssemblyData } from './sw-types.js'
import { get_cached, get_path_cached, get_cache_key, set_cached } from '@polymech/cache'
import {
MODULE_NAME,
MSG_FAILED_TO_LOAD
} from '../constants.js'
import { closeAppByName, fileAsBuffer, getSWBin, removeEmptyValues } from './sw-util.js'
// Converts a single SolidWorks (or drawio) source file to `target` by
// invoking an external helper executable chosen from the target/source
// extensions (convert.exe, model-reader.exe, getconfigs.exe, ExportHTML.exe,
// bom.exe, draw.io.exe). Results are cached via @polymech/cache keyed on the
// conversion-relevant options; cache hits restore the cached bytes and skip
// the external tool entirely.
export const convertFile = async (
src,
target,
view: string,
onNode: (data) => void = () => { },
options: SolidworkOptions,
configuration: string) => {
configuration = options.configuration || configuration
// Optionally kill a running SolidWorks before converting.
options.close && closeAppByName('SLDWORKS')
const osr_cache = OSR_CACHE()
// Cache key: base fields plus extension-specific options so e.g. a quality
// change invalidates jpg renders but not xlsx BOM exports.
let cache_key_obj: any = {
sw: options.sw,
src,
target,
configuration
}
if (target.endsWith('.jpg')) {
cache_key_obj =
{
...cache_key_obj,
quality: options.quality,
width: options.width,
height: options.height,
renderer: options.renderer
}
}
if (target.endsWith('.xlsx')) {
cache_key_obj = {
...cache_key_obj,
"bom-config": options['bom-config'],
"bom-detail": options['bom-detail'],
"bom-template": options['bom-template'],
"bom-type": options['bom-type'],
"bom-images": options['bom-images'],
}
}
const ca_options = JSON.parse(JSON.stringify(removeEmpty(cache_key_obj)))
let cached = await get_cached(src, ca_options, MODULE_NAME)
const cachedPath = await get_path_cached(src, ca_options, MODULE_NAME)
// NOTE(review): a missing target invalidates the cache and forces a full
// reconversion even when cached bytes exist — confirm this is intended
// rather than restoring the cached bytes to the missing target.
if (!exists(target)) {
cached = null;
}
// Cache hit: restore cached bytes if the on-disk target differs, then skip.
if (osr_cache && cached && cachedPath && options.cache == true) {
if (!exists(target) || !equalFiles(target, cachedPath)) {
write(target, Buffer.from(cached))
}
logger.debug(`[${MODULE_NAME}] Skipping conversion of ${src} to ${target}`)
await onNode({ src, target, options })
return Promise.resolve()
}
const parts = path.parse(target)
const source_parts = path.parse(src)
// Defaults: the generic converter script run from the SolidWorks bin dir.
let exe = '' + options.script
let cwd = getSWBin(options.sw)
let _target = '' + target
// Optional post-processing hook installed by specific converters below.
let onPost = null
// SW Photoview wont render correctly in hidden mode
if (parts.ext === '.jpg' && source_parts.ext.toLowerCase() === '.sldasm' && options.renderer.toLowerCase() === ' ') {
logger.debug(`[${MODULE_NAME}] Converting ${src} to ${target} : - Photoview: - ` + options.hidden)
options.hidden = "false"
}
// NOTE(review): in the --hidden and --renderer lines below, `+` binds
// tighter than `||`, so the "true"/"solidworks" fallbacks can never apply
// (the concatenated string is always truthy) — confirm intended defaults.
let args = [
`--source="${src}"`,
`--target="${target}"`,
`--configuration="${configuration}"`,
`--view="*${view}"`,
`--hidden=` + options.hidden || "true",
`--width=` + options.width,
`--height=` + options.height,
`--swv=` + options.swv,
`--renderer=` + options.renderer.toLowerCase() || "solidworks",
`--quality=${options.quality || swRayTraceRenderQuality_e.swRenderQuality_Good}`
]
if (options.save) args.push(`--save`)
if (options.pack) args.push(`--pack`)
if (options.rebuild) args.push(`--rebuild`)
if (options.light) args.push(`--light`)
if (options.write) args.push(`--write`)
// Assembly -> JSON model dump: run model-reader.exe, then strip empty
// values from the produced JSON in onPost.
if (parts.ext === '.json' && source_parts.ext.toLowerCase() === '.sldasm') {
exe = 'model-reader.exe'
args = [
`--source="${path.resolve(src)}"`,
`--target="${_target}"`
]
onPost = () => {
try {
let props = read(_target, 'json') as any[];
if (!props) {
logger.error('Error reading model file ', src)
return false
}
props = props.map(removeEmpty)
write(_target, props)
return true
} catch (e) {
logger.error(`Error executing model-reader::onPost for ${src} to ${_target}`)
write(_target, {})
return false
}
}
}
// Assembly -> configuration list: getconfigs.exe; onPost only validates.
if (parts.base.endsWith('-configs.json') && source_parts.ext.toLowerCase() === '.sldasm') {
exe = 'getconfigs.exe'
args = [
`--source="${path.resolve(src)}"`,
`--target="${path.resolve(_target)}"`
]
onPost = () => {
try {
let props = read(_target, 'json') as any[];
if (!props) {
logger.error('Error reading configurations file ', src)
return false
}
return true
} catch (e) {
logger.error(`Error executing get::onPost for ${src} to ${_target}`)
write(_target, {})
return false
}
}
}
// HTML export via eDrawings.
if (parts.ext === '.html') {
exe = 'ExportHTML.exe'
if (!configuration || configuration === 'Default') {
args = [
`"${src}"`,
`"${target}"`,
]
} else if (configuration) {
//EDrawings Control doesnt support configurations directly, we need a configuration specific edrawings file exported instead
const eDrawingsFile = src.toLowerCase().replace('.sldasm', `-${configuration}.EASM`)
if (!exists(eDrawingsFile)) {
logger.error(`Configuration specific edrawing file ${eDrawingsFile} doesnt exists`)
return Promise.resolve()
}
args = [
`"${eDrawingsFile}"`,
`"${target}"`,
`${configuration}`
]
}
}
// BOM export to xlsx; without caching, stale targets are removed first.
if (parts.ext === '.xlsx') {
exe = 'bom.exe';
args = [
`"${src}"`,
`"${target}"`,
`--configuration ${options['bom-config']}`,
`--type ${options['bom-type']}`,
`--detail ${options['bom-detail']}`
]
options['bom-images'] && args.push('--images')
options['bom-template'] && args.push(`--template ${options['bom-template']}`)
if (!options.cache && exists(target)) {
rm(target);
}
}
// drawio sources use the draw.io CLI, located via `which`.
if (source_parts.ext === '.drawio') {
exe = 'draw.io.exe';
try {
cwd = path.parse(which(exe)).dir;
} catch (e) {
logger.error(`Cant find ${exe}`);
return Promise.resolve();
}
args = [
`"${src}"`,
'-x',
`-f ${parts.ext.replace('.', '')}`,
`${options.args}`
]
}
const bin = path.resolve(`${cwd}/${exe}`)
if (!exists(bin)) {
logger.error(`${bin} doesnt exists in ${cwd}`)
logger.error('__dirname:' + dirname())
logger.error('options.sw ' + options.sw)
return
}
// Run the external tool, de-duplicate its output lines, detect failures,
// then normalize the messages for reporting.
const ret = await Helper.run(cwd, exe, args, options.debug)
ret.messages = [...new Set(ret.messages)]
const failed = !!ret.messages.find((m: string) => m.includes(MSG_FAILED_TO_LOAD))
ret.messages = ret.messages.map((m: string) => swProcMessage(m)).filter(x => x != null).map(x => x.message)
const info = {
...ret,
src,
target,
failed: failed,
options
}
await onNode(info)
onPost && onPost()
// On failure remove the (partial) output and skip caching.
if (info.failed) {
rm(_target)
return ret
}
osr_cache && options.cache == true && await set_cached(src, ca_options, MODULE_NAME, fileAsBuffer(_target))
options.close && closeAppByName('SLDWORKS')
return ret
}
// Converts `file` into every entry of `targets` sequentially (concurrency 1).
// Each target entry carries { target, configuration } (see `targets()` below).
// Dry runs only log the planned conversions.
export async function convertFiles(file, targets: string[], view, onNode: (data: any) => void = () => { }, options: SolidworkOptions) {
    if (options.dry) {
        // Bug fix: the mapping arrow previously used a block body without a
        // `return`, so every entry logged as "undefined". Also prefer the
        // entry's `.target` path over stringifying the whole object.
        logger.info(`Dry run convert ${file} to `, targets.map((t) => `\n\t${(t as any).target ?? t}`).join(',\n'))
        return Promise.resolve()
    }
    return pMap(targets, (target: any) => {
        return convertFile(file, target.target, view, onNode, options, target.configuration);
    }, { concurrency: 1 })
}
// Writes a conversion report to `dst`. Destinations ending in .csv are first
// formatted via reportCSV; other destinations receive the raw data.
// Returns the result of the write.
export const report = (data, dst: string) => {
    let formatted: any = null;
    if (dst.endsWith('.md')) {
        //report = reportMarkdown(data)
    }
    if (dst.endsWith('.csv')) {
        formatted = reportCSV(data)
    }
    logger.info(`Write report to ${dst}`)
    // Bug fix: the formatted CSV report was previously computed and then
    // discarded — `write(dst, data)` always wrote the raw data. Write the
    // formatted output when a formatter produced one.
    return write(dst, formatted ?? data);
}
// Computes { target, configuration } entries for a source file, one set per
// SolidWorks configuration (only "Default" unless a specific configuration
// was requested). Glob destinations yield one entry per extension; target
// directories are created eagerly as a side effect.
export const targets = (file: string, options: SolidworkOptions) => {
const srcParts = path.parse(file)
// Cloned so per-file SRC_*/CONFIGURATION assignments don't leak out.
const variables = clone(options.variables)
const targets = []
let configurations: any = { "Default": null }
if (options.configuration && options.configuration !== 'Default') {
configurations[options.configuration] = null
delete configurations["Default"]
}
for (const conf in configurations) {
if (options.dstInfo.IS_GLOB) {
options.dstInfo.GLOB_EXTENSIONS.forEach((e) => {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
variables.CONFIGURATION = conf
// NOTE(review): argument order substitute(template, alt, vars) differs
// from the resize module's substitute(alt, template, vars) — confirm
// which signature @polymech expects here.
let targetPath = substitute(options.variables.DST_PATH, options.alt, variables)
targetPath = path.resolve(targetPath.replace(options.variables.DST_FILE_EXT, '') + e)
const parts = path.parse(targetPath)
// Skip no-op conversions where source and target share an extension.
if (srcParts.ext === parts.ext) {
return
}
if (!exists(parts.dir)) {
try {
dir(parts.dir)
} catch (e) {
if (options.debug) {
logger.error(`Error creating target path ${parts.dir} for ${targetPath}`);
}
// Skips only this extension (return from the forEach callback).
return
}
}
targets.push({
target: targetPath,
configuration: conf
})
})
} else {
variables.SRC_NAME = srcParts.name
variables.SRC_DIR = srcParts.dir
variables.CONFIGURATION = conf
let targetPath = substitute(options.variables.DST_PATH, options.alt, variables)
if (!exists(targetPath)) {
try {
dir(targetPath)
} catch (e) {
if (options.debug) {
logger.error(`Error creating target path ${targetPath}`)
}
// NOTE(review): this `return` aborts the whole function with undefined
// (unlike the glob branch, which only skips one entry); callers map over
// the result and would throw — confirm a `continue` wasn't intended.
return
}
}
targets.push({
target: targetPath,
configuration: conf
})
}
}
return targets
}
// Top-level conversion driver: drops SolidWorks temp files (~$ prefix),
// converts each remaining source to its computed targets sequentially, and
// optionally writes a report. Returns the per-file conversion results.
export async function convert(options: SolidworkOptions) {
    logger.setSettings({ minLevel: options.logLevel as any || 'warn' })
    const reports = []
    const onNode = options.onNode || ((data) => reports.push(data))
    if (options.srcInfo.FILES.length === 0) {
        logger.warn(`No files found to convert : `, options.src)
        return
    }
    //skip orphan / temporary files
    options.srcInfo.FILES = options.srcInfo.FILES.filter((f) => !f.includes('~$'))
    const convertOne = async (f) => {
        const outputs = targets(f, options)
        logger.info(`Convert ${f} to ${outputs.map(t => t.target).join(',')}`)
        return convertFiles(f, outputs, options.view, onNode, options)
    }
    const ret = await pMap(options.srcInfo.FILES, convertOne, { concurrency: 1 })
    if (options.report) {
        const reportVariables = {
            dst: options.srcInfo.DIR,
            ...options.variables,
            CONFIGURATION: options.configuration || ''
        }
        const reportOutFile: string = path.resolve(resolve(options.report, false, reportVariables))
        logger.debug(`Write report to ${reportOutFile}`)
        report(reports, reportOutFile)
    }
    return ret
}

View File

@ -0,0 +1,84 @@
import * as CLI from 'yargs'
import * as path from 'path'
import { resolve, forward_slash, pathInfo } from "@polymech/commons"
// Normalizes raw yargs argv into a SolidworkOptions object: resolves the
// source path, merges string values from an optional JSON config file into
// the variable map, applies defaults, and derives SRC_*/DST_* variables from
// path info. Exits the process on an invalid source.
// NOTE(review): `read`, `logger`, `substitute`, `DEFAULT_REPORT` and
// `SolidworkOptions` are not among the imports visible in this fragment —
// presumably imported in the elided part of the file; verify.
export const sanitize = (argv: any): SolidworkOptions => {
const src = forward_slash(path.resolve(resolve(argv.src)))
const config: any = argv.config ? read(path.resolve('' + argv.config), 'json') : {}
// Only string-valued config entries become template variables.
const extraVariables = {}
for (const key in config) {
if (Object.prototype.hasOwnProperty.call(config, key)) {
const element = config[key];
if (typeof element === 'string') {
extraVariables[key] = element
}
}
}
const args: SolidworkOptions = {
src: src,
dst: '' + argv.dst as string,
debug: argv.debug,
verbose: argv.verbose,
dry: argv.dry,
onNode: argv.onNode,
cache: argv.cache,
// hidden/width/height are passed to external tools as strings, hence the
// string defaults.
hidden: argv.hidden || "true",
renderer: argv.renderer || "solidworks",
alt: argv.alt,
quality: argv.quality,
logLevel: argv.logLevel,
close: argv.close,
width: argv.width || "1024",
height: argv.height || "1024",
script: argv.script || 'convert.exe',
sw: argv.sw || 2024,
swv: argv.swv || 32,
configuration: argv.configuration || 'Default',
report: argv.report || DEFAULT_REPORT,
pack: argv.pack,
light: argv.light,
rebuild: argv.rebuild,
save: argv.save,
write: argv.write,
variables: { ...extraVariables },
view: argv.view || 'Render',
args: argv.args || '',
"bom-config": argv['bom-config'],
"bom-detail": argv['bom-detail'],
"bom-template": argv['bom-template'],
"bom-type": argv['bom-type'],
"bom-images": argv['bom-images'],
} as SolidworkOptions
if (!args.src) {
logger.error('Invalid source, abort')
return process.exit()
}
args.srcInfo = pathInfo(src)
if (!args.srcInfo.FILES) {
logger.error(`Invalid source files, abort`, args.srcInfo)
return process.exit()
}
// Expose all source path info fields as SRC_* template variables.
for (const key in args.srcInfo) {
if (Object.prototype.hasOwnProperty.call(args.srcInfo, key)) {
args.variables['SRC_' + key] = args.srcInfo[key]
}
}
// Destination is optional; when present, resolve it against the variables
// gathered so far and expose its path info as DST_* variables.
if (argv.dst) {
args.dst = path.resolve(substitute(args.dst, args.variables))
args.dstInfo = pathInfo(args.dst as string)
args.dstInfo.PATH = argv.dst as string
for (const key in args.dstInfo) {
if (Object.prototype.hasOwnProperty.call(args.dstInfo, key)) {
args.variables['DST_' + key] = args.dstInfo[key]
}
}
}
// NOTE(review): redundant — `view` already defaulted to 'Render' above.
(args as SolidworkOptions).view = argv.view as string || "Render"
return args
}

View File

@ -1,64 +0,0 @@
import * as path from 'node:path'
import { pathInfoEx } from '@polymech/commons'
import { DEFAULT_ROOTS, DEFAULT_VARS } from '@polymech/commons'
// Builds the template-variable map for a KBot task: base roots/vars plus
// SRC_* variables derived from a single include path (with dash/dot/
// underscore name-part splits), all upper-cased, then overlaid with any
// `var-*` CLI arguments (which keep their original case).
export const variables = (options: IKBotTask) => {
const { model, router,baseURL } = options
let ret = {
model,
router,
baseURL,
...DEFAULT_ROOTS,
...DEFAULT_VARS({})
}
// SRC_* variables are only derived when exactly one include is given.
if (options?.include?.length === 1) {
const [include] = options.include
// NOTE(review): empty destructure — pathInfoEx is called only for its side
// effects (if any); nothing is taken from its result. Confirm intent.
const { } = pathInfoEx(include)
const srcParts = path.parse(include)
const srcVariables: Record<string, string> = {}
srcVariables.SRC_NAME = srcParts.name
srcVariables.SRC_DIR = srcParts.dir
srcVariables.SRC_EXT = srcParts.ext
// NOTE(review): srcVariables.ROOT is never assigned above, so this branch
// is dead and SRC_REL is never set — possibly meant to read ret.ROOT or a
// value from pathInfoEx; verify.
if (srcVariables.ROOT) {
srcVariables.SRC_REL = path.relative(srcVariables.ROOT, srcParts.dir)
}
// Expose each dash-separated name part as SRC_NAME-<i>.
const dashed = srcParts.name.split('-')
if (dashed.length > 1) {
for (let i = 0; i < dashed.length; i++) {
srcVariables[`SRC_NAME-${i}`] = dashed[i]
}
}
// Same for dot-separated parts (SRC_NAME.<i>).
const dotted = srcParts.name.split('.')
if (dotted.length > 1) {
for (let i = 0; i < dotted.length; i++) {
srcVariables[`SRC_NAME.${i}`] = dotted[i]
}
}
// And underscore-separated parts (SRC_NAME_<i>).
const underscored = srcParts.name.split('_')
if (underscored.length > 1) {
for (let i = 0; i < underscored.length; i++) {
srcVariables[`SRC_NAME_${i}`] = underscored[i]
}
}
ret = { ...ret, ...srcVariables }
}
// CLI argv variables
let variables = Object.assign({}, ...Object.keys(options).filter((k) => k.startsWith('var-')).map((k) => {
return {
[k.replace('var-', '')]: options[k]
}
}))
// Upper-case all derived keys; CLI-provided variables win on conflict.
ret = Object.keys(ret).reduce((acc, key) => {
acc[key.toUpperCase()] = ret[key];
return acc;
}, {});
return { ...ret, ...variables }
}

View File

@ -1,9 +1,29 @@
import * as mupdf from 'mupdf'
import { Logger } from 'tslog'
import { writeFile, mkdir } from 'node:fs/promises'
import { writeFile } from 'node:fs/promises'
import { dirname } from 'node:path'
import { resolveVariables, pathInfoEx } from '@polymech/commons'
import { sync as write } from '@polymech/fs/write'
import { sync as mkdir } from '@polymech/fs/dir'
import { writeFileSync } from 'node:fs'
import { Buffer } from 'node:buffer'
// Converts object-like image data (e.g. a serialized Uint8Array such as
// { "0": 137, "1": 80, ... }) into a Buffer.
// The result is sized to the highest numeric index + 1. Missing indices are
// zero-filled: the original used Buffer.allocUnsafe, which leaks previous
// heap contents into the output for any index absent from the input object.
function imageDataObjectToBuffer(imageDataObject: Record<string, number>): Buffer {
    // Fast path: if the data is already a typed-array view, wrap it directly.
    if (ArrayBuffer.isView(imageDataObject)) {
        const view = imageDataObject as unknown as Uint8Array;
        return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
    }
    // Find the highest numeric key in a single pass (no sort needed).
    let maxIndex = -1;
    for (const key of Object.keys(imageDataObject)) {
        const index = Number(key);
        if (Number.isInteger(index) && index > maxIndex) {
            maxIndex = index;
        }
    }
    // Buffer.alloc zero-fills, so sparse inputs cannot expose stale memory.
    const buffer = Buffer.alloc(maxIndex + 1);
    for (const [key, value] of Object.entries(imageDataObject)) {
        const index = Number.parseInt(key, 10);
        if (!Number.isNaN(index) && index >= 0 && index < buffer.length) {
            buffer[index] = value;
        }
    }
    return buffer;
}
export type ImageFormat = 'png' | 'jpg';
@ -21,7 +41,7 @@ export async function convertPdfToImages(
pdfData: Buffer,
options: PdfToImageOptions
): Promise<string[]> {
const logger = options.logger || new Logger <any>();
const logger = options.logger || new Logger<any>();
const outputFiles: string[] = [];
try {
@ -36,7 +56,7 @@ export async function convertPdfToImages(
throw new Error(`startPage (${options.startPage}) is out of valid range (1-${pageCount})`);
}
if (end < 0 || end >= pageCount) {
throw new Error(`endPage (${options.endPage}) is out of valid range (1-${pageCount})`);
throw new Error(`endPage (${options.endPage}) is out of valid range (1-${pageCount})`);
}
if (start > end) {
// This should also be caught by Zod schema, but good to double-check
@ -48,11 +68,11 @@ export async function convertPdfToImages(
for (let i = start; i <= end; i++) {
const pageNumber = i + 1; // User-facing page number (1-based)
// Create page-specific variables
const pageVariables: Record<string, string> = {
...options.baseVariables,
PAGE: pageNumber.toString()
const pageVariables: Record<string, string> = {
...options.baseVariables,
PAGE: pageNumber.toString()
};
// Resolve the output path using the template and page-specific variables
@ -64,12 +84,13 @@ export async function convertPdfToImages(
mupdf.ColorSpace.DeviceRGB,
false
);
const imageData = options.format === 'png'
const imageData = options.format === 'png'
? pixmap.asPNG()
: pixmap.asJPEG(100, false);
write(outputPath, imageData)
mkdir(dirname(outputPath));
writeFileSync(outputPath, imageDataObjectToBuffer(imageData as any))
outputFiles.push(outputPath);
logger.info(`Converted page ${pageNumber} to ${outputPath}`);
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -6,12 +6,12 @@ import * as path from 'node:path';
const packageRoot = process.cwd(); // Assumes test runs from package root
const inputPdf = path.join('tests', 'RS485-780.pdf');
const outputDir = path.join(packageRoot, 'tests', 'out', 'RS485-780');
const outputPattern = '${SRC_DIR}/out/${SRC_NAME}/${SRC_NAME}-${PAGE}.${FORMAT}';
const outputPattern = '${SRC_DIR}/out/${SRC_NAME}/${SRC_NAME}-${PAGE}.jpg';
// Expected number of pages for RS485-780.pdf
const expectedPageCount = 29;
const expectedBaseName = 'RS485-780';
const expectedFormat = 'png'; // Default format
const expectedFormat = 'jpg'; // Default format
describe('CLI Integration Test - Variable Output Path', () => {
beforeAll(() => {

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 54 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 61 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 52 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 62 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 53 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 56 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

View File

@ -0,0 +1,21 @@
// Flattens the SKI780 functional-code groups into one row per code and
// writes them out as a formatted XLSX workbook (frozen header row,
// autofilter, fixed column widths).
const xlsx = require("xlsx");

const codeGroups = [
  { FunctionalCodeSet: "SKI780", Group: "Group P (readable)", Codes: ["P0","P1","P2","P3","P4","P5","P6","P7","P8","P9","PA","PB"] },
  { FunctionalCodeSet: "SKI780", Group: "Group A (readable)", Codes: ["A0","A1","A2","A5","A6","A7","A8","A9","AA","AB","AC"] }
];

// Expand each group into individual { FunctionalCodeSet, Group, Code } rows.
const rows = [];
for (const { FunctionalCodeSet, Group, Codes } of codeGroups) {
  for (const Code of Codes) {
    rows.push({ FunctionalCodeSet, Group, Code });
  }
}

const workbook = xlsx.utils.book_new();
const sheet = xlsx.utils.json_to_sheet(rows);
xlsx.utils.book_append_sheet(workbook, sheet, "SKI780 Codes");

// Sheet-level presentation options (NOTE(review): '!freeze' support may
// depend on the SheetJS edition in use — confirm the header actually
// freezes in the generated file).
sheet['!freeze'] = { ySplit: 1 }; // freeze header row
sheet['!autofilter'] = { ref: `A1:C${rows.length + 1}` };
sheet['!cols'] = [{ wch: 18 }, { wch: 22 }, { wch: 6 }]; // or compute

xlsx.writeFile(workbook, "SKI780_Functional_Codes.xlsx");
console.log("✓ SKI780_Functional_Codes.xlsx created with proper column widths!");

Some files were not shown because too many files have changed in this diff Show More