chore: clean up duplicate files created by macOS sync
Details:
- Removed 352 duplicate files with numeric suffixes
- Updated .gitignore to prevent such files in the future
- These files were created by iCloud or other sync-service conflicts
- Does not affect project functionality; only removes redundant files
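Note: the .gitignore entries added by this commit are not shown in the diffs below. As a rough, hypothetical illustration only, patterns for ignoring macOS/iCloud sync-conflict duplicates (files that gain a numeric suffix such as "file 2.ts") might look like:

# hypothetical example — sync-conflict duplicates, not the actual patterns from this commit
* 2.*
* 3.*
* 2
* 3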
@@ -1,12 +0,0 @@
const { npm_config_user_agent: UA } = process.env;
const [packageManager] = (UA ?? '').split(' ');
const [name, version] = packageManager.split('/');
if (name !== 'pnpm') {
	const suggestion = '\033[1;92mpnpm\033[0;31m';
	console.error('\033[0;31m');
	console.error('╭───────────────────────────────────────────╮');
	console.error(`│\tPlease use ${suggestion} instead of ${name} \t │`);
	console.error('╰───────────────────────────────────────────╯');
	console.error('\033[0m');
	process.exit(1);
}
@@ -1,252 +0,0 @@
#!/usr/bin/env node
/**
 * This script is used to build the n8n application for production.
 * It will:
 * 1. Clean the previous build output
 * 2. Run pnpm install and build
 * 3. Prepare for deployment - clean package.json files
 * 4. Create a pruned production deployment in 'compiled'
 */

import { $, echo, fs, chalk } from 'zx';
import path from 'path';

// Check if running in a CI environment
const isCI = process.env.CI === 'true';

// Check if test controller should be excluded (CI + flag not set)
const excludeTestController =
	process.env.CI === 'true' && process.env.INCLUDE_TEST_CONTROLLER !== 'true';

// Disable verbose output and force color only if not in CI
$.verbose = !isCI;
process.env.FORCE_COLOR = isCI ? '0' : '1';

const scriptDir = path.dirname(new URL(import.meta.url).pathname);
const isInScriptsDir = path.basename(scriptDir) === 'scripts';
const rootDir = isInScriptsDir ? path.join(scriptDir, '..') : scriptDir;

// #region ===== Configuration =====
const config = {
	compiledAppDir: path.join(rootDir, 'compiled'),
	rootDir: rootDir,
};

// Define backend patches to keep during deployment
const PATCHES_TO_KEEP = ['pdfjs-dist', 'pkce-challenge', 'bull'];

// #endregion ===== Configuration =====

// #region ===== Helper Functions =====
const timers = new Map();

function startTimer(name) {
	timers.set(name, Date.now());
}

function getElapsedTime(name) {
	const start = timers.get(name);
	if (!start) return 0;
	return Math.floor((Date.now() - start) / 1000);
}

function formatDuration(seconds) {
	const hours = Math.floor(seconds / 3600);
	const minutes = Math.floor((seconds % 3600) / 60);
	const secs = seconds % 60;

	if (hours > 0) return `${hours}h ${minutes}m ${secs}s`;
	if (minutes > 0) return `${minutes}m ${secs}s`;
	return `${secs}s`;
}

function printHeader(title) {
	echo('');
	echo(chalk.blue.bold(`===== ${title} =====`));
}

function printDivider() {
	echo(chalk.gray('-----------------------------------------------'));
}

// #endregion ===== Helper Functions =====

// #region ===== Main Build Process =====
printHeader('n8n Build & Production Preparation');
echo(`INFO: Output Directory: ${config.compiledAppDir}`);
printDivider();

startTimer('total_build');

// 0. Clean Previous Build Output
echo(chalk.yellow(`INFO: Cleaning previous output directory: ${config.compiledAppDir}...`));
await fs.remove(config.compiledAppDir);
printDivider();

// 1. Local Application Pre-build
echo(chalk.yellow('INFO: Starting local application pre-build...'));
startTimer('package_build');

echo(chalk.yellow('INFO: Running pnpm install and build...'));
try {
	const installProcess = $`cd ${config.rootDir} && pnpm install --frozen-lockfile`;
	installProcess.pipe(process.stdout);
	await installProcess;

	const buildProcess = $`cd ${config.rootDir} && pnpm build`;
	buildProcess.pipe(process.stdout);
	await buildProcess;

	echo(chalk.green('✅ pnpm install and build completed'));
} catch (error) {
	console.error(chalk.red('\n🛑 BUILD PROCESS FAILED!'));
	console.error(chalk.red('An error occurred during the build process:'));
	process.exit(1);
}

const packageBuildTime = getElapsedTime('package_build');
echo(chalk.green(`✅ Package build completed in ${formatDuration(packageBuildTime)}`));
printDivider();

// 2. Prepare for deployment - clean package.json files
echo(chalk.yellow('INFO: Performing pre-deploy cleanup on package.json files...'));

// Find and backup package.json files
const packageJsonFiles = await $`cd ${config.rootDir} && find . -name "package.json" \
	-not -path "./node_modules/*" \
	-not -path "*/node_modules/*" \
	-not -path "./compiled/*" \
	-type f`.lines();

// Backup all package.json files
// This is only needed locally, not in CI
if (process.env.CI !== 'true') {
	for (const file of packageJsonFiles) {
		if (file) {
			const fullPath = path.join(config.rootDir, file);
			await fs.copy(fullPath, `${fullPath}.bak`);
		}
	}
}
// Run FE trim script
await $`cd ${config.rootDir} && node .github/scripts/trim-fe-packageJson.js`;
echo(chalk.yellow('INFO: Performing selective patch cleanup...'));

const packageJsonPath = path.join(config.rootDir, 'package.json');

if (await fs.pathExists(packageJsonPath)) {
	try {
		// 1. Read the package.json file
		const packageJsonContent = await fs.readFile(packageJsonPath, 'utf8');
		let packageJson = JSON.parse(packageJsonContent);

		// 2. Modify the patchedDependencies directly in JavaScript
		if (packageJson.pnpm && packageJson.pnpm.patchedDependencies) {
			const filteredPatches = {};
			for (const [key, value] of Object.entries(packageJson.pnpm.patchedDependencies)) {
				// Check if the key (patch name) starts with any of the allowed patches
				const shouldKeep = PATCHES_TO_KEEP.some((patchPrefix) => key.startsWith(patchPrefix));
				if (shouldKeep) {
					filteredPatches[key] = value;
				}
			}
			packageJson.pnpm.patchedDependencies = filteredPatches;
		}

		// 3. Write the modified package.json back
		await fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2), 'utf8');

		echo(chalk.green('✅ Kept backend patches: ' + PATCHES_TO_KEEP.join(', ')));
		echo(
			chalk.gray(
				`Removed FE/dev patches that are not in the list of backend patches to keep: ${PATCHES_TO_KEEP.join(', ')}`,
			),
		);
	} catch (error) {
		echo(chalk.red(`ERROR: Failed to cleanup patches in package.json: ${error.message}`));
		process.exit(1);
	}
}

echo(chalk.yellow(`INFO: Creating pruned production deployment in '${config.compiledAppDir}'...`));
startTimer('package_deploy');

await fs.ensureDir(config.compiledAppDir);

if (excludeTestController) {
	const cliPackagePath = path.join(config.rootDir, 'packages/cli/package.json');
	const content = await fs.readFile(cliPackagePath, 'utf8');
	const packageJson = JSON.parse(content);
	packageJson.files.push('!dist/**/e2e.*');
	await fs.writeFile(cliPackagePath, JSON.stringify(packageJson, null, 2));
	echo(chalk.gray(' - Excluded test controller from packages/cli/package.json'));
}

await $`cd ${config.rootDir} && NODE_ENV=production DOCKER_BUILD=true pnpm --filter=n8n --prod --legacy deploy --no-optional ./compiled`;

const packageDeployTime = getElapsedTime('package_deploy');

// Restore package.json files
// This is only needed locally, not in CI
if (process.env.CI !== 'true') {
	for (const file of packageJsonFiles) {
		if (file) {
			const fullPath = path.join(config.rootDir, file);
			const backupPath = `${fullPath}.bak`;
			if (await fs.pathExists(backupPath)) {
				await fs.move(backupPath, fullPath, { overwrite: true });
			}
		}
	}
}

// Calculate output size
const compiledAppOutputSize = (await $`du -sh ${config.compiledAppDir} | cut -f1`).stdout.trim();

// Generate build manifest
const buildManifest = {
	buildTime: new Date().toISOString(),
	artifactSize: compiledAppOutputSize,
	buildDuration: {
		packageBuild: packageBuildTime,
		packageDeploy: packageDeployTime,
		total: getElapsedTime('total_build'),
	},
};

await fs.writeJson(path.join(config.compiledAppDir, 'build-manifest.json'), buildManifest, {
	spaces: 2,
});

echo(chalk.green(`✅ Package deployment completed in ${formatDuration(packageDeployTime)}`));
echo(`INFO: Size of ${config.compiledAppDir}: ${compiledAppOutputSize}`);
printDivider();

// Calculate total time
const totalBuildTime = getElapsedTime('total_build');

// #endregion ===== Main Build Process =====

// #region ===== Final Output =====
echo('');
echo(chalk.green.bold('================ BUILD SUMMARY ================'));
echo(chalk.green(`✅ n8n built successfully!`));
echo('');
echo(chalk.blue('📦 Build Output:'));
echo(` Directory: ${path.resolve(config.compiledAppDir)}`);
echo(` Size: ${compiledAppOutputSize}`);
echo('');
echo(chalk.blue('⏱️ Build Times:'));
echo(` Package Build: ${formatDuration(packageBuildTime)}`);
echo(` Package Deploy: ${formatDuration(packageDeployTime)}`);
echo(chalk.gray(' -----------------------------'));
echo(chalk.bold(` Total Time: ${formatDuration(totalBuildTime)}`));
echo('');
echo(chalk.blue('📋 Build Manifest:'));
echo(` ${path.resolve(config.compiledAppDir)}/build-manifest.json`);
echo(chalk.green.bold('=============================================='));

// #endregion ===== Final Output =====

// Exit with success
process.exit(0);
@@ -1,171 +0,0 @@
#!/usr/bin/env node
/**
 * Build n8n Docker image locally
 *
 * This script simulates the CI build process for local testing.
 * Default output: 'n8nio/n8n:local'
 * Override with IMAGE_BASE_NAME and IMAGE_TAG environment variables.
 */

import { $, echo, fs, chalk, os } from 'zx';
import { fileURLToPath } from 'url';
import path from 'path';

// Disable verbose mode for cleaner output
$.verbose = false;
process.env.FORCE_COLOR = '1';

// #region ===== Helper Functions =====

/**
 * Get Docker platform string based on host architecture
 * @returns {string} Platform string (e.g., 'linux/amd64')
 */
function getDockerPlatform() {
	const arch = os.arch();
	const dockerArch = {
		x64: 'amd64',
		arm64: 'arm64',
	}[arch];

	if (!dockerArch) {
		throw new Error(`Unsupported architecture: ${arch}. Only x64 and arm64 are supported.`);
	}

	return `linux/${dockerArch}`;
}

/**
 * Format duration in seconds
 * @param {number} ms - Duration in milliseconds
 * @returns {string} Formatted duration
 */
function formatDuration(ms) {
	return `${Math.floor(ms / 1000)}s`;
}

/**
 * Get Docker image size
 * @param {string} imageName - Full image name with tag
 * @returns {Promise<string>} Image size or 'Unknown'
 */
async function getImageSize(imageName) {
	try {
		const { stdout } = await $`docker images ${imageName} --format "{{.Size}}"`;
		return stdout.trim();
	} catch {
		return 'Unknown';
	}
}

/**
 * Check if a command exists
 * @param {string} command - Command to check
 * @returns {Promise<boolean>} True if command exists
 */
async function commandExists(command) {
	try {
		await $`command -v ${command}`;
		return true;
	} catch {
		return false;
	}
}

// #endregion ===== Helper Functions =====

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const isInScriptsDir = path.basename(__dirname) === 'scripts';
const rootDir = isInScriptsDir ? path.join(__dirname, '..') : __dirname;

const config = {
	dockerfilePath: path.join(rootDir, 'docker/images/n8n/Dockerfile'),
	imageBaseName: process.env.IMAGE_BASE_NAME || 'n8nio/n8n',
	imageTag: process.env.IMAGE_TAG || 'local',
	buildContext: rootDir,
	compiledAppDir: path.join(rootDir, 'compiled'),
	get fullImageName() {
		return `${this.imageBaseName}:${this.imageTag}`;
	},
};

// #region ===== Main Build Process =====

const platform = getDockerPlatform();

async function main() {
	echo(chalk.blue.bold('===== Docker Build for n8n ====='));
	echo(`INFO: Image: ${config.fullImageName}`);
	echo(`INFO: Platform: ${platform}`);
	echo(chalk.gray('-'.repeat(47)));

	await checkPrerequisites();

	// Build Docker image
	const buildTime = await buildDockerImage();

	// Get image details
	const imageSize = await getImageSize(config.fullImageName);

	// Display summary
	displaySummary({
		imageName: config.fullImageName,
		platform,
		size: imageSize,
		buildTime,
	});
}

async function checkPrerequisites() {
	if (!(await fs.pathExists(config.compiledAppDir))) {
		echo(chalk.red(`Error: Compiled app directory not found at ${config.compiledAppDir}`));
		echo(chalk.yellow('Please run build-n8n.mjs first!'));
		process.exit(1);
	}

	if (!(await commandExists('docker'))) {
		echo(chalk.red('Error: Docker is not installed or not in PATH'));
		process.exit(1);
	}
}

async function buildDockerImage() {
	const startTime = Date.now();
	echo(chalk.yellow('INFO: Building Docker image...'));

	try {
		const { stdout } = await $`docker build \
			--platform ${platform} \
			--build-arg TARGETPLATFORM=${platform} \
			-t ${config.fullImageName} \
			-f ${config.dockerfilePath} \
			--load \
			${config.buildContext}`;

		echo(stdout);
		return formatDuration(Date.now() - startTime);
	} catch (error) {
		echo(chalk.red(`ERROR: Docker build failed: ${error.stderr || error.message}`));
		process.exit(1);
	}
}

function displaySummary({ imageName, platform, size, buildTime }) {
	echo('');
	echo(chalk.green.bold('═'.repeat(54)));
	echo(chalk.green.bold(' DOCKER BUILD COMPLETE'));
	echo(chalk.green.bold('═'.repeat(54)));
	echo(chalk.green(`✅ Image built: ${imageName}`));
	echo(` Platform: ${platform}`);
	echo(` Size: ${size}`);
	echo(` Build time: ${buildTime}`);
	echo(chalk.green.bold('═'.repeat(54)));
}

// #endregion ===== Main Build Process =====

main().catch((error) => {
	echo(chalk.red(`Unexpected error: ${error.message}`));
	process.exit(1);
});
@@ -1,17 +0,0 @@
import { accessSync, constants } from 'node:fs';
import { execSync } from 'node:child_process';

const ZX_PATH = 'node_modules/.bin/zx';

if (!zxExists()) {
	execSync('pnpm --frozen-lockfile --filter n8n-monorepo install', { stdio: 'inherit' });
}

function zxExists() {
	try {
		accessSync(ZX_PATH, constants.F_OK);
		return true;
	} catch {
		return false;
	}
}
@@ -1,65 +0,0 @@
#!/usr/bin/env node

import fs from 'fs';
import path from 'path';
import { execSync } from 'child_process';

const prettier = path.resolve('node_modules', '.bin', 'prettier');
const biome = path.resolve('node_modules', '.bin', 'biome');

[prettier, biome].forEach((bin) => {
	if (!fs.existsSync(bin)) {
		throw new Error(
			[`${path.basename(bin)} not found at path: ${bin}`, 'Please run `pnpm i` first'].join('\n'),
		);
	}
});

const prettierConfig = path.resolve('.prettierrc.js');
const biomeConfig = path.resolve('biome.jsonc');
const ignore = path.resolve('.prettierignore');

const ROOT_DIRS_TO_SKIP = ['.git', 'node_modules', 'packages', '.turbo', 'cypress'];
const EXTENSIONS_TO_FORMAT_WITH_PRETTIER = ['.yml'];
const EXTENSIONS_TO_FORMAT_WITH_BIOME = ['.js', '.json', '.ts'];

const isDir = (path) => fs.lstatSync(path).isDirectory();

const isPrettierTarget = (path) =>
	EXTENSIONS_TO_FORMAT_WITH_PRETTIER.some((ext) => path.endsWith(ext));
const isBiomeTarget = (path) => EXTENSIONS_TO_FORMAT_WITH_BIOME.some((ext) => path.endsWith(ext));

const biomeTargets = [];
const prettierTargets = [];

const walk = (dir) => {
	fs.readdirSync(dir).forEach((entry) => {
		const entryPath = path.resolve(dir, entry);
		if (isDir(entryPath)) walk(entryPath);
		if (isPrettierTarget(entryPath)) prettierTargets.push(entryPath);
		if (isBiomeTarget(entryPath)) biomeTargets.push(entryPath);
	});
};

fs.readdirSync('.').forEach((cur) => {
	if (ROOT_DIRS_TO_SKIP.includes(cur)) return;
	if (isDir(cur)) walk(cur);
	if (isPrettierTarget(cur)) prettierTargets.push(cur);
	if (isBiomeTarget(cur)) biomeTargets.push(cur);
});

execSync(
	[
		prettier,
		'--config',
		prettierConfig,
		'--ignore-path',
		ignore,
		'--write',
		prettierTargets.join(' '),
	].join(' '),
);

execSync(
	[biome, 'format', '--write', `--config-path=${biomeConfig}`, biomeTargets.join(' ')].join(' '),
);
@@ -1,10 +0,0 @@
#!/usr/bin/env node

import { execSync } from 'node:child_process';

// Skip lefthook install in CI or Docker build
if (process.env.CI || process.env.DOCKER_BUILD) {
	process.exit(0);
}

execSync('pnpm lefthook install', { stdio: 'inherit' });
@@ -1,30 +0,0 @@
// Resets the repository by deleting all untracked files except for a few exceptions.
import { $, echo, fs, question } from 'zx';

$.verbose = true;
process.env.FORCE_COLOR = '1';

const excludePatterns = ['/.vscode/', '/.idea/', '.env', '/.claude/'];
const excludeFlags = excludePatterns.map((exclude) => ['-e', exclude]).flat();

echo(
	`This will delete all untracked files except for those matching the following patterns: ${excludePatterns.map((x) => `"${x}"`).join(', ')}.`,
);

const answer = await question('❓ Do you want to continue? (y/n) ');

if (!['y', 'Y', ''].includes(answer)) {
	echo('Aborting...');
	process.exit(0);
}

echo('🧹 Cleaning untracked files...');
await $({ verbose: false })`git clean -fxd ${excludeFlags}`;
// In case node_modules is not removed by git clean
fs.removeSync('node_modules');

echo('⏬ Running pnpm install...');
await $`pnpm install`;

echo('🏗️ Running pnpm build...');
await $`pnpm build`;
@@ -1,156 +0,0 @@
#!/usr/bin/env node
/**
 * This script is used to scan the n8n docker image for vulnerabilities.
 * It uses Trivy to scan the image.
 */

import { $, echo, fs, chalk } from 'zx';
import path from 'path';

$.verbose = false;
process.env.FORCE_COLOR = '1';

const scriptDir = path.dirname(new URL(import.meta.url).pathname);
const isInScriptsDir = path.basename(scriptDir) === 'scripts';
const rootDir = isInScriptsDir ? path.join(scriptDir, '..') : scriptDir;

// #region ===== Configuration =====
const config = {
	imageBaseName: process.env.IMAGE_BASE_NAME || 'n8nio/n8n',
	imageTag: process.env.IMAGE_TAG || 'local',
	trivyImage: process.env.TRIVY_IMAGE || 'aquasec/trivy:latest',
	severity: process.env.TRIVY_SEVERITY || 'CRITICAL,HIGH,MEDIUM,LOW',
	outputFormat: process.env.TRIVY_FORMAT || 'table',
	outputFile: process.env.TRIVY_OUTPUT || null,
	scanTimeout: process.env.TRIVY_TIMEOUT || '10m',
	ignoreUnfixed: process.env.TRIVY_IGNORE_UNFIXED === 'true',
	scanners: process.env.TRIVY_SCANNERS || 'vuln',
	quiet: process.env.TRIVY_QUIET === 'true',
	rootDir: rootDir,
};

config.fullImageName = `${config.imageBaseName}:${config.imageTag}`;

const printHeader = (title) =>
	!config.quiet && echo(`\n${chalk.blue.bold(`===== ${title} =====`)}`);

const printSummary = (status, time, message) => {
	if (config.quiet) return;

	echo('\n' + chalk.blue.bold('===== Scan Summary ====='));
	echo(status === 'success' ? chalk.green.bold(message) : chalk.yellow.bold(message));
	echo(chalk[status === 'success' ? 'green' : 'yellow'](` Scan time: ${time}s`));

	if (config.outputFile) {
		const resolvedPath = path.isAbsolute(config.outputFile)
			? config.outputFile
			: path.join(config.rootDir, config.outputFile);
		echo(chalk[status === 'success' ? 'green' : 'yellow'](` Report saved to: ${resolvedPath}`));
	}

	echo('\n' + chalk.gray('Scan Configuration:'));
	echo(chalk.gray(` • Target Image: ${config.fullImageName}`));
	echo(chalk.gray(` • Severity Levels: ${config.severity}`));
	echo(chalk.gray(` • Scanners: ${config.scanners}`));
	if (config.ignoreUnfixed) echo(chalk.gray(` • Ignored unfixed: yes`));
	echo(chalk.blue.bold('========================'));
};

// #endregion ===== Configuration =====

// #region ===== Main Process =====
(async () => {
	printHeader('Trivy Security Scan for n8n Image');

	try {
		await $`command -v docker`;
	} catch {
		echo(chalk.red('Error: Docker is not installed or not in PATH'));
		process.exit(1);
	}

	try {
		await $`docker image inspect ${config.fullImageName} > /dev/null 2>&1`;
	} catch {
		echo(chalk.red(`Error: Docker image '${config.fullImageName}' not found`));
		echo(chalk.yellow('Please run dockerize-n8n.mjs first!'));
		process.exit(1);
	}

	// Pull latest Trivy image silently
	try {
		await $`docker pull ${config.trivyImage} > /dev/null 2>&1`;
	} catch {
		// Silent fallback to cached version
	}

	// Build Trivy command
	const trivyArgs = [
		'run',
		'--rm',
		'-v',
		'/var/run/docker.sock:/var/run/docker.sock',
		config.trivyImage,
		'image',
		'--severity',
		config.severity,
		'--format',
		config.outputFormat,
		'--timeout',
		config.scanTimeout,
		'--scanners',
		config.scanners,
		'--no-progress',
	];

	if (config.ignoreUnfixed) trivyArgs.push('--ignore-unfixed');
	if (config.quiet && config.outputFormat === 'table') trivyArgs.push('--quiet');

	// Handle output file - resolve relative to root directory
	if (config.outputFile) {
		const outputPath = path.isAbsolute(config.outputFile)
			? config.outputFile
			: path.join(config.rootDir, config.outputFile);
		await fs.ensureDir(path.dirname(outputPath));
		trivyArgs.push('--output', '/tmp/trivy-output', '-v', `${outputPath}:/tmp/trivy-output`);
	}

	trivyArgs.push(config.fullImageName);

	// Run the scan
	const startTime = Date.now();

	try {
		const result = await $`docker ${trivyArgs}`;

		// Print Trivy output first
		if (!config.outputFile && result.stdout) {
			echo(result.stdout);
		}

		// Then print our summary
		const scanTime = Math.floor((Date.now() - startTime) / 1000);
		printSummary('success', scanTime, '✅ Security scan completed successfully');

		process.exit(0);
	} catch (error) {
		const scanTime = Math.floor((Date.now() - startTime) / 1000);

		// Trivy returns exit code 1 when vulnerabilities are found
		if (error.exitCode === 1) {
			// Print Trivy output first
			if (!config.outputFile && error.stdout) {
				echo(error.stdout);
			}

			// Then print our summary
			printSummary('warning', scanTime, '⚠️ Vulnerabilities found!');
			process.exit(1);
		} else {
			echo(chalk.red(`❌ Scan failed: ${error.message}`));
			process.exit(error.exitCode || 1);
		}
	}
})();

// #endregion ===== Main Process =====