Chore/build npm (#9)
Co-authored-by: DigHuang <114602213+DigHuang@users.noreply.github.com> Co-authored-by: Felix <24791380+vcfgv@users.noreply.github.com> Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
47
scripts/after-pack.cjs
Normal file
47
scripts/after-pack.cjs
Normal file
@@ -0,0 +1,47 @@
|
||||
/**
|
||||
* after-pack.cjs
|
||||
*
|
||||
* electron-builder afterPack hook.
|
||||
*
|
||||
* Problem: electron-builder respects .gitignore when copying extraResources.
|
||||
* Since .gitignore contains "node_modules/", the openclaw bundle's
|
||||
* node_modules directory is silently skipped during the extraResources copy.
|
||||
*
|
||||
* Solution: This hook runs AFTER electron-builder finishes packing. It manually
|
||||
* copies build/openclaw/node_modules/ into the output resources directory,
|
||||
* bypassing electron-builder's glob filtering entirely.
|
||||
*/
|
||||
|
||||
const { cpSync, existsSync, readdirSync } = require('fs');
|
||||
const { join } = require('path');
|
||||
|
||||
exports.default = async function afterPack(context) {
|
||||
const appOutDir = context.appOutDir;
|
||||
const platform = context.electronPlatformName; // 'win32' | 'darwin' | 'linux'
|
||||
|
||||
const src = join(__dirname, '..', 'build', 'openclaw', 'node_modules');
|
||||
|
||||
// On macOS, resources live inside the .app bundle
|
||||
let resourcesDir;
|
||||
if (platform === 'darwin') {
|
||||
const appName = context.packager.appInfo.productFilename;
|
||||
resourcesDir = join(appOutDir, `${appName}.app`, 'Contents', 'Resources');
|
||||
} else {
|
||||
resourcesDir = join(appOutDir, 'resources');
|
||||
}
|
||||
|
||||
const dest = join(resourcesDir, 'openclaw', 'node_modules');
|
||||
|
||||
if (!existsSync(src)) {
|
||||
console.warn('[after-pack] ⚠️ build/openclaw/node_modules not found. Run "pnpm run bundle:openclaw" first.');
|
||||
return;
|
||||
}
|
||||
|
||||
const depCount = readdirSync(src, { withFileTypes: true })
|
||||
.filter(d => d.isDirectory() && d.name !== '.bin')
|
||||
.length;
|
||||
|
||||
console.log(`[after-pack] Copying ${depCount} openclaw dependencies to ${dest} ...`);
|
||||
cpSync(src, dest, { recursive: true });
|
||||
console.log('[after-pack] ✅ openclaw node_modules copied successfully.');
|
||||
};
|
||||
208
scripts/bundle-openclaw.mjs
Normal file
208
scripts/bundle-openclaw.mjs
Normal file
@@ -0,0 +1,208 @@
|
||||
#!/usr/bin/env zx
|
||||
|
||||
/**
|
||||
* bundle-openclaw.mjs
|
||||
*
|
||||
* Bundles the openclaw npm package with ALL its dependencies (including
|
||||
* transitive ones) into a self-contained directory (build/openclaw/) for
|
||||
* electron-builder to pick up.
|
||||
*
|
||||
* pnpm uses a content-addressable virtual store with symlinks. A naive copy
|
||||
* of node_modules/openclaw/ will miss runtime dependencies entirely. Even
|
||||
* copying only direct siblings misses transitive deps (e.g. @clack/prompts
|
||||
* depends on @clack/core which lives in a separate virtual store entry).
|
||||
*
|
||||
* This script performs a recursive BFS through pnpm's virtual store to
|
||||
* collect every transitive dependency into a flat node_modules structure.
|
||||
*/
|
||||
|
||||
import 'zx/globals';
|
||||
|
||||
// Paths: repo root, bundle output directory, and the project node_modules.
const ROOT = path.resolve(__dirname, '..');
const OUTPUT = path.join(ROOT, 'build', 'openclaw');
const NODE_MODULES = path.join(ROOT, 'node_modules');

echo`📦 Bundling openclaw for electron-builder...`;

// 1. Resolve the real path of node_modules/openclaw (follows pnpm symlink).
const openclawLink = path.join(NODE_MODULES, 'openclaw');
if (!fs.existsSync(openclawLink)) {
  echo`❌ node_modules/openclaw not found. Run pnpm install first.`;
  process.exit(1);
}

const openclawReal = fs.realpathSync(openclawLink);
echo`  openclaw resolved: ${openclawReal}`;

// 2. Recreate the output directory from scratch.
if (fs.existsSync(OUTPUT)) {
  fs.rmSync(OUTPUT, { recursive: true });
}
fs.mkdirSync(OUTPUT, { recursive: true });

// 3. Copy the openclaw package itself to the OUTPUT root, dereferencing
// symlinks so the result is self-contained.
echo`  Copying openclaw package...`;
fs.cpSync(openclawReal, OUTPUT, { recursive: true, dereference: true });

// 4. Recursively collect ALL transitive dependencies with a BFS over pnpm's
// virtual store. pnpm lays each package out as
//   .pnpm/<pkg>@<ver>/node_modules/<pkg>          <- real files
// with that package's dependencies as sibling symlinks into OTHER
// virtual-store entries. Transitive deps (e.g. @clack/core pulled in by
// @clack/prompts) are therefore NOT siblings of openclaw — each symlink
// must be followed into its own virtual-store node_modules and scanned.

const collected = new Map(); // realPath -> packageName (for deduplication)
const queue = []; // BFS queue of virtual-store node_modules dirs to visit
|
||||
|
||||
/**
 * Walk upward from a package's resolved path to the nearest ancestor
 * directory literally named "node_modules" — i.e. the pnpm virtual-store
 * node_modules that contains the package.
 *
 * e.g. .pnpm/chalk@5.4.1/node_modules/chalk -> .pnpm/chalk@5.4.1/node_modules
 * e.g. .pnpm/@clack+core@0.4.1/node_modules/@clack/core -> .pnpm/@clack+core@0.4.1/node_modules
 *
 * @param {string} realPkgPath - symlink-free package directory path.
 * @returns {string|null} the containing node_modules directory, or null
 *   when the walk reaches the filesystem root without finding one.
 */
function getVirtualStoreNodeModules(realPkgPath) {
  for (
    let current = realPkgPath;
    current !== path.dirname(current);
    current = path.dirname(current)
  ) {
    if (path.basename(current) === 'node_modules') {
      return current;
    }
  }
  return null;
}
|
||||
|
||||
/**
 * List all package entries in a virtual-store node_modules directory.
 * Handles both regular packages (chalk) and scoped packages (@clack/prompts).
 *
 * @param {string} nodeModulesDir - a pnpm virtual-store node_modules dir.
 * @returns {Array<{name: string, fullPath: string}>} discovered packages;
 *   empty when the directory does not exist.
 */
function listPackages(nodeModulesDir) {
  const result = [];
  if (!fs.existsSync(nodeModulesDir)) return result;

  for (const entry of fs.readdirSync(nodeModulesDir)) {
    // ".bin" holds executable shims, not packages.
    if (entry === '.bin') continue;

    const entryPath = path.join(nodeModulesDir, entry);

    if (entry.startsWith('@')) {
      // Scoped entry: it is a scope directory (or a symlink to one) whose
      // children are the actual packages, named "@scope/pkg".
      // lstat only here — non-scoped entries never used it, and the
      // original's realpathSync result (resolvedScope) was dead code.
      const stat = fs.lstatSync(entryPath);
      if (stat.isDirectory() || stat.isSymbolicLink()) {
        try {
          for (const sub of fs.readdirSync(entryPath)) {
            result.push({
              name: `${entry}/${sub}`,
              fullPath: path.join(entryPath, sub),
            });
          }
        } catch {
          // entryPath is not a readable directory (e.g. plain file) — skip.
        }
      }
    } else {
      result.push({ name: entry, fullPath: entryPath });
    }
  }
  return result;
}
|
||||
|
||||
// Seed the BFS with openclaw's own virtual-store node_modules.
const openclawVirtualNM = getVirtualStoreNodeModules(openclawReal);
if (!openclawVirtualNM) {
  echo`❌ Could not determine pnpm virtual store for openclaw`;
  process.exit(1);
}

echo`  Virtual store root: ${openclawVirtualNM}`;
queue.push({ nodeModulesDir: openclawVirtualNM, skipPkg: 'openclaw' });

while (queue.length > 0) {
  const { nodeModulesDir, skipPkg } = queue.shift();

  for (const { name, fullPath } of listPackages(nodeModulesDir)) {
    // The entry matching skipPkg is the store entry's own package,
    // not one of its dependencies.
    if (name === skipPkg) continue;

    let realPath;
    try {
      realPath = fs.realpathSync(fullPath);
    } catch {
      continue; // broken symlink, skip
    }

    // Deduplicate on the resolved path so shared deps are visited once.
    if (collected.has(realPath)) continue;
    collected.set(realPath, name);

    // Enqueue this package's own virtual-store node_modules so that ITS
    // dependencies (the transitive ones) get discovered as well.
    const depVirtualNM = getVirtualStoreNodeModules(realPath);
    if (depVirtualNM && depVirtualNM !== nodeModulesDir) {
      queue.push({ nodeModulesDir: depVirtualNM, skipPkg: name });
    }
  }
}

echo`  Found ${collected.size} total packages (direct + transitive)`;
|
||||
|
||||
// 5. Copy every collected package into OUTPUT/node_modules/ (flat layout).
//
// BFS order guarantees packages closer to openclaw are discovered first, so
// when one name appears at several versions (chalk@5 direct vs chalk@4
// transitive) the FIRST occurrence wins and later duplicates are dropped.
// This prevents version conflicts like CJS chalk@4 overwriting ESM chalk@5.
const outputNodeModules = path.join(OUTPUT, 'node_modules');
fs.mkdirSync(outputNodeModules, { recursive: true });

const copiedNames = new Set(); // package names already copied
let copiedCount = 0;
let skippedDupes = 0;

for (const [realPath, pkgName] of collected) {
  if (copiedNames.has(pkgName)) {
    skippedDupes++;
    continue; // keep the first (closest-to-openclaw) version
  }
  copiedNames.add(pkgName);

  const dest = path.join(outputNodeModules, pkgName);
  try {
    // Scoped packages (@clack/core) need their scope directory created first.
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    fs.cpSync(realPath, dest, { recursive: true, dereference: true });
    copiedCount++;
  } catch (err) {
    echo`  ⚠️ Skipped ${pkgName}: ${err.message}`;
  }
}
|
||||
|
||||
// 6. Sanity-check the bundle: both entry points must be present.
const entryExists = fs.existsSync(path.join(OUTPUT, 'openclaw.mjs'));
const distExists = fs.existsSync(path.join(OUTPUT, 'dist', 'entry.js'));

echo``;
echo`✅ Bundle complete: ${OUTPUT}`;
echo`  Unique packages copied: ${copiedCount}`;
echo`  Duplicate versions skipped: ${skippedDupes}`;
echo`  Total discovered: ${collected.size}`;
for (const [label, ok] of [
  ['openclaw.mjs', entryExists],
  ['dist/entry.js', distExists],
]) {
  echo`  ${label}: ${ok ? '✓' : '✗'}`;
}

if (!entryExists || !distExists) {
  echo`❌ Bundle verification failed!`;
  process.exit(1);
}
|
||||
@@ -1,139 +1,124 @@
|
||||
import { spawnSync } from 'node:child_process';
|
||||
import { mkdirSync, rmSync, existsSync, chmodSync, renameSync, writeFileSync } from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { platform, arch } from 'node:os';
|
||||
#!/usr/bin/env zx
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const ROOT_DIR = join(__dirname, '..');
|
||||
import 'zx/globals';
|
||||
|
||||
// Configuration
|
||||
const ROOT_DIR = path.resolve(__dirname, '..');
|
||||
const UV_VERSION = '0.10.0';
|
||||
const BASE_URL = `https://github.com/astral-sh/uv/releases/download/${UV_VERSION}`;
|
||||
const OUTPUT_BASE = join(ROOT_DIR, 'resources', 'bin');
|
||||
const OUTPUT_BASE = path.join(ROOT_DIR, 'resources', 'bin');
|
||||
|
||||
// Mapping Node platforms/archs to uv release naming
|
||||
const TARGETS = {
|
||||
'darwin-arm64': {
|
||||
filename: 'uv-aarch64-apple-darwin.tar.gz',
|
||||
binName: 'uv',
|
||||
extractCmd: (src, dest) => spawnSync('tar', ['-xzf', src, '-C', dest])
|
||||
},
|
||||
'darwin-x64': {
|
||||
filename: 'uv-x86_64-apple-darwin.tar.gz',
|
||||
binName: 'uv',
|
||||
extractCmd: (src, dest) => spawnSync('tar', ['-xzf', src, '-C', dest])
|
||||
},
|
||||
'win32-x64': {
|
||||
filename: 'uv-x86_64-pc-windows-msvc.zip',
|
||||
binName: 'uv.exe',
|
||||
extractCmd: (src, dest) => {
|
||||
if (platform() === 'win32') {
|
||||
return spawnSync('powershell.exe', ['-Command', `Expand-Archive -Path "${src}" -DestinationPath "${dest}" -Force`]);
|
||||
} else {
|
||||
return spawnSync('unzip', ['-q', '-o', src, '-d', dest]);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
async function downloadFile(url, dest) {
|
||||
console.log(`⬇️ Downloading: ${url}`);
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) throw new Error(`Failed to download: ${response.statusText}`);
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
writeFileSync(dest, Buffer.from(arrayBuffer));
|
||||
}
|
||||
|
||||
async function setupTarget(id) {
|
||||
const target = TARGETS[id];
|
||||
if (!target) {
|
||||
console.warn(`⚠️ Target ${id} is not supported by this script.`);
|
||||
echo(chalk.yellow`⚠️ Target ${id} is not supported by this script.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const targetDir = join(OUTPUT_BASE, id);
|
||||
const tempDir = join(ROOT_DIR, 'temp_uv_extract');
|
||||
const archivePath = join(ROOT_DIR, target.filename);
|
||||
const targetDir = path.join(OUTPUT_BASE, id);
|
||||
const tempDir = path.join(ROOT_DIR, 'temp_uv_extract');
|
||||
const archivePath = path.join(ROOT_DIR, target.filename);
|
||||
const downloadUrl = `${BASE_URL}/${target.filename}`;
|
||||
|
||||
console.log(`
|
||||
📦 Setting up uv for ${id}...`);
|
||||
echo(chalk.blue`\n📦 Setting up uv for ${id}...`);
|
||||
|
||||
// Cleanup & Prep
|
||||
if (existsSync(targetDir)) rmSync(targetDir, { recursive: true });
|
||||
if (existsSync(tempDir)) rmSync(tempDir, { recursive: true });
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
mkdirSync(tempDir, { recursive: true });
|
||||
await fs.remove(targetDir);
|
||||
await fs.remove(tempDir);
|
||||
await fs.ensureDir(targetDir);
|
||||
await fs.ensureDir(tempDir);
|
||||
|
||||
try {
|
||||
// Download
|
||||
await downloadFile(`${BASE_URL}/${target.filename}`, archivePath);
|
||||
echo`⬇️ Downloading: ${downloadUrl}`;
|
||||
const response = await fetch(downloadUrl);
|
||||
if (!response.ok) throw new Error(`Failed to download: ${response.statusText}`);
|
||||
const buffer = await response.arrayBuffer();
|
||||
await fs.writeFile(archivePath, Buffer.from(buffer));
|
||||
|
||||
// Extract
|
||||
console.log('📂 Extracting...');
|
||||
target.extractCmd(archivePath, tempDir);
|
||||
echo`📂 Extracting...`;
|
||||
if (target.filename.endsWith('.zip')) {
|
||||
if (os.platform() === 'win32') {
|
||||
// Use .NET Framework for ZIP extraction (more reliable than Expand-Archive)
|
||||
const { execSync } = await import('child_process');
|
||||
const psCommand = `Add-Type -AssemblyName System.IO.Compression.FileSystem; [System.IO.Compression.ZipFile]::ExtractToDirectory('${archivePath.replace(/'/g, "''")}', '${tempDir.replace(/'/g, "''")}')`;
|
||||
execSync(`powershell.exe -NoProfile -Command "${psCommand}"`, { stdio: 'inherit' });
|
||||
} else {
|
||||
await $`unzip -q -o ${archivePath} -d ${tempDir}`;
|
||||
}
|
||||
} else {
|
||||
await $`tar -xzf ${archivePath} -C ${tempDir}`;
|
||||
}
|
||||
|
||||
// Move binary to final location
|
||||
// Move binary
|
||||
// uv archives usually contain a folder named after the target
|
||||
const folderName = target.filename.replace('.tar.gz', '').replace('.zip', '');
|
||||
const sourceBin = join(tempDir, folderName, target.binName);
|
||||
const destBin = join(targetDir, target.binName);
|
||||
const sourceBin = path.join(tempDir, folderName, target.binName);
|
||||
const destBin = path.join(targetDir, target.binName);
|
||||
|
||||
if (existsSync(sourceBin)) {
|
||||
renameSync(sourceBin, destBin);
|
||||
if (await fs.pathExists(sourceBin)) {
|
||||
await fs.move(sourceBin, destBin, { overwrite: true });
|
||||
} else {
|
||||
// Fallback: search for the binary if folder structure changed
|
||||
console.log('🔍 Binary not found in expected subfolder, searching...');
|
||||
const findResult = spawnSync(platform() === 'win32' ? 'where' : 'find',
|
||||
platform() === 'win32' ? ['/R', tempDir, target.binName] : [tempDir, '-name', target.binName]);
|
||||
|
||||
const foundPath = findResult.stdout.toString().trim().split('\n')[0];
|
||||
if (foundPath && existsSync(foundPath)) {
|
||||
renameSync(foundPath, destBin);
|
||||
echo(chalk.yellow`🔍 Binary not found in expected subfolder, searching...`);
|
||||
const files = await glob(`**/${target.binName}`, { cwd: tempDir, absolute: true });
|
||||
if (files.length > 0) {
|
||||
await fs.move(files[0], destBin, { overwrite: true });
|
||||
} else {
|
||||
throw new Error(`Could not find ${target.binName} in extracted files.`);
|
||||
}
|
||||
}
|
||||
|
||||
// Permission fix
|
||||
if (platform() !== 'win32') {
|
||||
chmodSync(destBin, 0o755);
|
||||
if (os.platform() !== 'win32') {
|
||||
await fs.chmod(destBin, 0o755);
|
||||
}
|
||||
|
||||
console.log(`✅ Success: ${destBin}`);
|
||||
echo(chalk.green`✅ Success: ${destBin}`);
|
||||
} finally {
|
||||
// Cleanup
|
||||
if (existsSync(archivePath)) rmSync(archivePath);
|
||||
if (existsSync(tempDir)) rmSync(tempDir, { recursive: true });
|
||||
await fs.remove(archivePath);
|
||||
await fs.remove(tempDir);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const args = process.argv.slice(2);
|
||||
const downloadAll = args.includes('--all');
|
||||
// Main logic
|
||||
const args = process.argv.slice(3); // zx scripts/file.mjs --all -> argv is [node, zx, file, --all] ? or similar.
|
||||
// zx execution: process.argv is [node, script, users_args...]
|
||||
// Let's use minimist which zx includes globally as `argv`
|
||||
const downloadAll = argv.all;
|
||||
|
||||
if (downloadAll) {
|
||||
echo(chalk.cyan`🌐 Downloading uv binaries for ALL supported platforms...`);
|
||||
for (const id of Object.keys(TARGETS)) {
|
||||
await setupTarget(id);
|
||||
}
|
||||
} else {
|
||||
const currentId = `${os.platform()}-${os.arch()}`;
|
||||
echo(chalk.cyan`💻 Detected system: ${currentId}`);
|
||||
|
||||
if (downloadAll) {
|
||||
console.log('🌐 Downloading uv binaries for ALL supported platforms...');
|
||||
for (const id of Object.keys(TARGETS)) {
|
||||
await setupTarget(id);
|
||||
}
|
||||
if (TARGETS[currentId]) {
|
||||
await setupTarget(currentId);
|
||||
} else {
|
||||
const currentId = `${platform()}-${arch()}`;
|
||||
console.log(`💻 Detected system: ${currentId}`);
|
||||
|
||||
if (TARGETS[currentId]) {
|
||||
await setupTarget(currentId);
|
||||
} else {
|
||||
console.error(`❌ Current system ${currentId} is not in the supported download list.`);
|
||||
console.log('Supported targets:', Object.keys(TARGETS).join(', '));
|
||||
process.exit(1);
|
||||
}
|
||||
echo(chalk.red`❌ Current system ${currentId} is not in the supported download list.`);
|
||||
echo(`Supported targets: ${Object.keys(TARGETS).join(', ')}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('\n🎉 Done!');
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error('\n❌ Error:', err.message);
|
||||
process.exit(1);
|
||||
});
|
||||
echo(chalk.green`\n🎉 Done!`);
|
||||
|
||||
85
scripts/generate-icons.mjs
Normal file
85
scripts/generate-icons.mjs
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env zx
|
||||
|
||||
import 'zx/globals';
|
||||
import sharp from 'sharp';
|
||||
import png2icons from 'png2icons';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
// Resolve project paths relative to this script's location.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const PROJECT_ROOT = path.resolve(__dirname, '..');
const ICONS_DIR = path.join(PROJECT_ROOT, 'resources', 'icons');
const SVG_SOURCE = path.join(ICONS_DIR, 'icon.svg');

echo`🎨 Generating ClawX icons using Node.js...`;

// The SVG master must exist before anything else happens.
if (!fs.existsSync(SVG_SOURCE)) {
  echo`❌ SVG source not found: ${SVG_SOURCE}`;
  process.exit(1);
}

await fs.ensureDir(ICONS_DIR);

try {
  // 1. Rasterize the SVG once into a 1024x1024 master PNG buffer; every
  // other output is derived from this single buffer.
  echo`  Processing SVG source...`;
  const masterPngBuffer = await sharp(SVG_SOURCE)
    .resize(1024, 1024)
    .png() // force PNG encoding regardless of sharp's default
    .toBuffer();

  // Root icon.png — Electron conventionally uses 512x512 here.
  await sharp(masterPngBuffer)
    .resize(512, 512)
    .toFile(path.join(ICONS_DIR, 'icon.png'));
  echo`  ✅ Created icon.png (512x512)`;

  // 2. Windows .ico. png2icons.createICO(buffer, scaler, withSize, useMath)
  // returns a Buffer on success or null on failure; HERMITE scaling is the
  // speed/quality middle ground.
  echo`🪟 Generating Windows .ico...`;
  const icoBuffer = png2icons.createICO(masterPngBuffer, png2icons.HERMITE, 0, false);
  if (icoBuffer) {
    fs.writeFileSync(path.join(ICONS_DIR, 'icon.ico'), icoBuffer);
    echo`  ✅ Created icon.ico`;
  } else {
    // png2icons exposes no error detail — null is all we get on failure.
    echo(chalk.red`  ❌ Failed to create icon.ico`);
  }

  // 3. macOS .icns from the same master buffer.
  echo`🍎 Generating macOS .icns...`;
  const icnsBuffer = png2icons.createICNS(masterPngBuffer, png2icons.HERMITE, 0);
  if (icnsBuffer) {
    fs.writeFileSync(path.join(ICONS_DIR, 'icon.icns'), icnsBuffer);
    echo`  ✅ Created icon.icns`;
  } else {
    echo(chalk.red`  ❌ Failed to create icon.icns`);
  }

  // 4. Linux PNG set at the conventional sizes.
  echo`🐧 Generating Linux PNG icons...`;
  const linuxSizes = [16, 32, 48, 64, 128, 256, 512];
  let generatedCount = 0;
  for (const size of linuxSizes) {
    await sharp(masterPngBuffer)
      .resize(size, size)
      .toFile(path.join(ICONS_DIR, `${size}x${size}.png`));
    generatedCount++;
  }
  echo`  ✅ Created ${generatedCount} Linux PNG icons`;

  echo`\n✨ Icon generation complete! Files located in: ${ICONS_DIR}`;

} catch (error) {
  echo(chalk.red`\n❌ Fatal Error: ${error.message}`);
  process.exit(1);
}
|
||||
@@ -1,111 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Icon Generation Script
|
||||
# Generates app icons for macOS, Windows, and Linux from SVG source
|
||||
#
|
||||
# Prerequisites:
|
||||
# - macOS: brew install imagemagick librsvg
|
||||
# - Linux: apt install imagemagick librsvg2-bin
|
||||
# - Windows: Install ImageMagick
|
||||
#
|
||||
# Usage: ./scripts/generate-icons.sh
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
|
||||
ICONS_DIR="$PROJECT_DIR/resources/icons"
|
||||
SVG_SOURCE="$ICONS_DIR/icon.svg"
|
||||
|
||||
echo "🎨 Generating ClawX icons..."
|
||||
|
||||
# Check if SVG source exists
|
||||
if [ ! -f "$SVG_SOURCE" ]; then
|
||||
echo "❌ SVG source not found: $SVG_SOURCE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for required tools
|
||||
if ! command -v convert &> /dev/null; then
|
||||
echo "❌ ImageMagick not found. Please install it:"
|
||||
echo " macOS: brew install imagemagick"
|
||||
echo " Linux: apt install imagemagick"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v rsvg-convert &> /dev/null; then
|
||||
echo "❌ rsvg-convert not found. Please install it:"
|
||||
echo " macOS: brew install librsvg"
|
||||
echo " Linux: apt install librsvg2-bin"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create temp directory
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
|
||||
echo "📁 Using temp directory: $TEMP_DIR"
|
||||
|
||||
# Generate PNG files at various sizes
|
||||
SIZES=(16 32 64 128 256 512 1024)
|
||||
|
||||
for SIZE in "${SIZES[@]}"; do
|
||||
echo " Generating ${SIZE}x${SIZE} PNG..."
|
||||
rsvg-convert -w $SIZE -h $SIZE "$SVG_SOURCE" -o "$TEMP_DIR/icon_${SIZE}.png"
|
||||
done
|
||||
|
||||
# ============ macOS (.icns) ============
|
||||
echo "🍎 Generating macOS .icns..."
|
||||
|
||||
ICONSET_DIR="$TEMP_DIR/ClawX.iconset"
|
||||
mkdir -p "$ICONSET_DIR"
|
||||
|
||||
# macOS iconset requires specific file names
|
||||
cp "$TEMP_DIR/icon_16.png" "$ICONSET_DIR/icon_16x16.png"
|
||||
cp "$TEMP_DIR/icon_32.png" "$ICONSET_DIR/icon_16x16@2x.png"
|
||||
cp "$TEMP_DIR/icon_32.png" "$ICONSET_DIR/icon_32x32.png"
|
||||
cp "$TEMP_DIR/icon_64.png" "$ICONSET_DIR/icon_32x32@2x.png"
|
||||
cp "$TEMP_DIR/icon_128.png" "$ICONSET_DIR/icon_128x128.png"
|
||||
cp "$TEMP_DIR/icon_256.png" "$ICONSET_DIR/icon_128x128@2x.png"
|
||||
cp "$TEMP_DIR/icon_256.png" "$ICONSET_DIR/icon_256x256.png"
|
||||
cp "$TEMP_DIR/icon_512.png" "$ICONSET_DIR/icon_256x256@2x.png"
|
||||
cp "$TEMP_DIR/icon_512.png" "$ICONSET_DIR/icon_512x512.png"
|
||||
cp "$TEMP_DIR/icon_1024.png" "$ICONSET_DIR/icon_512x512@2x.png"
|
||||
|
||||
if command -v iconutil &> /dev/null; then
|
||||
iconutil -c icns -o "$ICONS_DIR/icon.icns" "$ICONSET_DIR"
|
||||
echo " ✅ Created icon.icns"
|
||||
else
|
||||
echo " ⚠️ iconutil not found (macOS only). Skipping .icns generation."
|
||||
fi
|
||||
|
||||
# ============ Windows (.ico) ============
|
||||
echo "🪟 Generating Windows .ico..."
|
||||
|
||||
# Windows ICO typically includes 16, 32, 48, 64, 128, 256
|
||||
convert "$TEMP_DIR/icon_16.png" \
|
||||
"$TEMP_DIR/icon_32.png" \
|
||||
"$TEMP_DIR/icon_64.png" \
|
||||
"$TEMP_DIR/icon_128.png" \
|
||||
"$TEMP_DIR/icon_256.png" \
|
||||
"$ICONS_DIR/icon.ico"
|
||||
echo " ✅ Created icon.ico"
|
||||
|
||||
# ============ Linux (PNG set) ============
|
||||
echo "🐧 Generating Linux PNG icons..."
|
||||
|
||||
LINUX_SIZES=(16 32 48 64 128 256 512)
|
||||
for SIZE in "${LINUX_SIZES[@]}"; do
|
||||
cp "$TEMP_DIR/icon_${SIZE}.png" "$ICONS_DIR/${SIZE}x${SIZE}.png" 2>/dev/null || \
|
||||
rsvg-convert -w $SIZE -h $SIZE "$SVG_SOURCE" -o "$ICONS_DIR/${SIZE}x${SIZE}.png"
|
||||
done
|
||||
echo " ✅ Created Linux PNG icons"
|
||||
|
||||
# ============ Copy main icon ============
|
||||
cp "$TEMP_DIR/icon_512.png" "$ICONS_DIR/icon.png"
|
||||
echo " ✅ Created icon.png (512x512)"
|
||||
|
||||
echo ""
|
||||
echo "✅ Icon generation complete!"
|
||||
echo " Generated files in: $ICONS_DIR"
|
||||
ls -la "$ICONS_DIR"
|
||||
Reference in New Issue
Block a user