pull: Initial commit

Yep_Q
2025-09-08 04:48:28 +08:00
parent 5c0619656d
commit f64f498365
11751 changed files with 1953723 additions and 0 deletions

.vscode/settings.json

@@ -0,0 +1,10 @@
{
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
".yoyo": true
}
}

n8n-n8n-1.109.2/.actrc

@@ -0,0 +1,5 @@
-P blacksmith-2vcpu-ubuntu-2204=ubuntu-latest
-P blacksmith-4vcpu-ubuntu-2204=ubuntu-latest
-P ubuntu-22.04=ubuntu-latest
-P ubuntu-20.04=ubuntu-latest
--container-architecture linux/amd64


@@ -0,0 +1,30 @@
{
"baseDir": "packages/frontend/editor-ui/dist",
"defaultCompression": "gzip",
"reportOutput": [
[
"github",
{
"checkRun": true,
"commitStatus": "off",
"prComment": true
}
]
],
"files": [
{
"path": "*.wasm",
"friendlyName": "WASM Dependencies"
}
],
"groups": [
{
"groupName": "Editor UI - Total JS Size",
"path": "**/*.js"
},
{
"groupName": "Editor UI - Total CSS Size",
"path": "**/*.css"
}
]
}


@@ -0,0 +1,9 @@
FROM n8nio/base:22
RUN apk add --no-cache --update openssh sudo shadow bash
RUN echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/node && chmod 0440 /etc/sudoers.d/node
RUN mkdir /workspaces && chown node:node /workspaces
RUN npm install -g pnpm
USER node
RUN mkdir -p ~/.pnpm-store && pnpm config set store-dir ~/.pnpm-store --global


@@ -0,0 +1,19 @@
{
"name": "n8n",
"dockerComposeFile": "docker-compose.yml",
"service": "n8n",
"workspaceFolder": "/workspaces",
"mounts": [
"type=bind,source=${localWorkspaceFolder},target=/workspaces,consistency=cached",
"type=bind,source=${localEnv:HOME}/.ssh,target=/home/node/.ssh,consistency=cached",
"type=bind,source=${localEnv:HOME}/.n8n,target=/home/node/.n8n,consistency=cached"
],
"forwardPorts": [8080, 5678],
"postCreateCommand": "corepack prepare --activate && pnpm install",
"postAttachCommand": "pnpm build",
"customizations": {
"codespaces": {
"openFiles": ["CONTRIBUTING.md"]
}
}
}


@@ -0,0 +1,24 @@
volumes:
postgres-data:
services:
postgres:
image: postgres:16-alpine
restart: unless-stopped
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
- POSTGRES_DB=n8n
- POSTGRES_PASSWORD=password
n8n:
build:
context: .
dockerfile: Dockerfile
volumes:
- ..:/workspaces:cached
command: sleep infinity
environment:
DB_POSTGRESDB_HOST: postgres
DB_TYPE: postgresdb
DB_POSTGRESDB_PASSWORD: password

n8n-n8n-1.109.2/.dockerignore

@@ -0,0 +1,19 @@
**/*.md
**/.env
.cache
assets
node_modules
packages/node-dev
packages/**/node_modules
packages/**/dist
packages/**/.turbo
packages/**/*.test.*
.git
.github
!.github/scripts
*.tsbuildinfo
docker/compose
docker/**/Dockerfile
.vscode
packages/testing
cypress

n8n-n8n-1.109.2/.editorconfig

@@ -0,0 +1,20 @@
root = true
[*]
charset = utf-8
indent_style = tab
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[package.json]
indent_style = space
indent_size = 2
[*.yml]
indent_style = space
indent_size = 2
[*.ts]
quote_type = single


@@ -0,0 +1,18 @@
# Commits of large-scale changes to exclude from `git blame` results
# Set up linting and formatting (#2120)
56c4c6991fb21ba4b7bdcd22c929f63cc1d1defe
# refactor(editor): Apply Prettier (no-changelog) #4920
5ca2148c7ed06c90f999508928b7a51f9ac7a788
# refactor: Run lintfix (no-changelog) (#7537)
62c096710fab2f7e886518abdbded34b55e93f62
# refactor: Move test files alongside tested files (#11504)
7e58fc4fec468aca0b45d5bfe6150e1af632acbc
f32b13c6ed078be042a735bc8621f27e00dc3116

n8n-n8n-1.109.2/.gitattributes

@@ -0,0 +1 @@
*.sh text eol=lf

n8n-n8n-1.109.2/.github/CODEOWNERS

@@ -0,0 +1 @@
packages/@n8n/db/src/migrations/ @n8n-io/migrations-review


@@ -0,0 +1,105 @@
name: Bug Report
description: Create a bug report to help us improve
body:
- type: markdown
attributes:
value: |
> ⚠️ This form is for reporting bugs only.
> ❌ Please do not use this form for general support, feature requests, or questions.
> 💬 For help and general inquiries, visit our [community support forum](https://community.n8n.io).
> ☁️ If you're experiencing issues with cloud instances not starting or license-related problems, contact [n8n support directly](mailto:help@n8n.io).
---
Thank you for helping us improve n8n!
To ensure we can address your report efficiently, please fill out all sections in English and provide as much detail as possible.
- type: textarea
id: description
attributes:
label: Bug Description
description: A clear and concise description of what the bug is
placeholder: Tell us what you see!
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: To Reproduce
description: Steps to reproduce the behavior
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: true
- type: textarea
id: expected
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen
validations:
required: true
- type: textarea
id: debug-info
attributes:
label: Debug Info
description: This can be found under Help > About n8n > Copy debug information
validations:
required: true
- type: markdown
attributes:
value: '## Environment'
- type: input
id: os
attributes:
label: Operating System
placeholder: ex. Ubuntu Linux 22.04
validations:
required: true
- type: input
id: n8n-version
attributes:
label: n8n Version
placeholder: ex. 1.25.0
validations:
required: true
- type: input
id: nodejs-version
attributes:
label: Node.js Version
placeholder: ex. 22.16.0
validations:
required: true
- type: dropdown
id: db
attributes:
label: Database
options:
- SQLite (default)
- PostgreSQL
- MySQL
- MariaDB
default: 0
validations:
required: true
- type: dropdown
id: execution-mode
attributes:
label: Execution mode
description: '[Info](https://docs.n8n.io/hosting/scaling/execution-modes-processes/)'
options:
- main (default)
- queue
- own (deprecated)
default: 0
validations:
required: true
- type: dropdown
id: hosting
attributes:
label: Hosting
options:
- n8n cloud
- self hosted
default: 0
validations:
required: true


@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: Feature request
url: https://community.n8n.io
about: Suggest an idea for this project
- name: Question / Problem
url: https://community.n8n.io
about: Questions and problems with n8n
- name: n8n Security Vulnerability
url: https://n8n.io/legal/#vulnerability
about: Learn about our Vulnerability Disclosure Policy

n8n-n8n-1.109.2/.github/actionlint.yaml

@@ -0,0 +1,4 @@
self-hosted-runner:
labels:
- blacksmith-2vcpu-ubuntu-2204
- blacksmith-4vcpu-ubuntu-2204


@@ -0,0 +1,58 @@
name: 'Setup Environment and Build Project'
description: 'Sets up Node.js with pnpm, installs dependencies, optionally enables Turborepo caching, and optionally builds the project.'
inputs:
node-version:
description: 'Node.js version to use.'
required: false
default: '22.x'
enable-caching:
description: 'Flag to enable/disable all caching.'
required: false
default: 'true'
cache-suffix:
description: 'Suffix to add to the cache key.'
required: false
default: 'base:build'
skip-build:
description: 'Skip the build step, useful when restoring cached artifacts.'
required: false
default: 'false'
cache-paths:
description: 'Paths to cache. Defaults to dist folders.'
required: false
default: './packages/**/dist'
runs:
using: 'composite'
steps:
- name: Setup pnpm CLI
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
with:
run_install: false
- name: Setup Node.js
uses: useblacksmith/setup-node@65c6ca86fdeb0ab3d85e78f57e4f6a7e4780b391 # v5.0.4
with:
node-version: ${{ inputs.node-version }}
cache: pnpm
- name: Install dependencies
run: pnpm install --frozen-lockfile
shell: bash
- name: Configure Turborepo Cache
if: inputs.enable-caching == 'true'
uses: useblacksmith/caching-for-turbo@v1
- name: Build packages
if: inputs.skip-build == 'false'
run: pnpm build
shell: bash
- name: Cache artifacts
if: inputs.enable-caching == 'true' && inputs.skip-build == 'false'
uses: useblacksmith/cache@c5fe29eb0efdf1cf4186b9f7fcbbcbc0cf025662 # v5.0.2
with:
path: ${{ inputs.cache-paths }}
key: ${{ github.sha }}-${{ inputs.cache-suffix }}


@@ -0,0 +1,42 @@
name: 'Blacksmith Node.js Build Setup'
description: 'Configures Node.js with pnpm, installs dependencies, enables Turborepo caching, optionally sets up Docker layer caching, and builds the project (or runs a custom build command).'
inputs:
node-version:
description: 'Node.js version to use. Uses latest 22.x by default.'
required: false
default: '22.x'
enable-docker-cache:
description: 'Whether to set up Blacksmith Buildx for Docker layer caching.'
required: false
default: 'false'
type: boolean
build-command:
description: 'Command to execute for building the project. Leave empty to skip the build step.'
required: false
default: 'pnpm build'
type: string
runs:
using: 'composite'
steps:
- name: Setup Node.js
uses: useblacksmith/setup-node@65c6ca86fdeb0ab3d85e78f57e4f6a7e4780b391 # v5.0.4
with:
node-version: ${{ inputs.node-version }}
- name: Setup pnpm and Install Dependencies
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.0.0
with:
run_install: true
- name: Configure Turborepo Cache
uses: useblacksmith/caching-for-turbo@bafb57e7ebdbf1185762286ec94d24648cd3938a # v1
- name: Setup Blacksmith Buildx for Docker Cache
if: ${{ inputs.enable-docker-cache == 'true' }}
uses: useblacksmith/build-push-action@574eb0ee0b59c6a687ace24192f0727dfb65d6d7 # v1.2.0
- name: Build Project
run: ${{ inputs.build-command }}
shell: bash

n8n-n8n-1.109.2/.github/docker-compose.yml

@@ -0,0 +1,43 @@
services:
mariadb:
image: mariadb:10.5
environment:
- MARIADB_DATABASE=n8n
- MARIADB_ROOT_PASSWORD=password
- MARIADB_MYSQL_LOCALHOST_USER=true
ports:
- 3306:3306
tmpfs:
- /var/lib/mysql
mysql-8.0.13:
image: mysql:8.0.13
environment:
- MYSQL_DATABASE=n8n
- MYSQL_ROOT_PASSWORD=password
ports:
- 3306:3306
tmpfs:
- /var/lib/mysql
mysql-8.4:
image: mysql:8.4
environment:
- MYSQL_DATABASE=n8n
- MYSQL_ROOT_PASSWORD=password
ports:
- 3306:3306
tmpfs:
- /var/lib/mysql
postgres:
image: postgres:16
restart: always
environment:
- POSTGRES_DB=n8n
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=password
ports:
- 5432:5432
tmpfs:
- /var/lib/postgresql/data


@@ -0,0 +1,29 @@
## Summary
<!--
Describe what the PR does and how to test.
Photos and videos are recommended.
-->
## Related Linear tickets, Github issues, and Community forum posts
<!--
Include links to **Linear ticket** or Github issue or Community forum post.
Important in order to close *automatically* and provide context to reviewers.
https://linear.app/n8n/issue/
-->
<!-- Use "closes #<issue-number>", "fixes #<issue-number>", or "resolves #<issue-number>" to automatically close issues when the PR is merged. -->
## Review / Merge checklist
- [ ] PR title and summary are descriptive. ([conventions](../blob/master/.github/pull_request_title_conventions.md)) <!--
**Remember, the title automatically goes into the changelog.
Use `(no-changelog)` otherwise.**
-->
- [ ] [Docs updated](https://github.com/n8n-io/n8n-docs) or follow-up ticket created.
- [ ] Tests included. <!--
A bug is not considered fixed, unless a test is added to prevent it from happening again.
A feature is not complete without tests.
-->
- [ ] PR Labeled with `release/backport` (if the PR is an urgent fix that needs to be backported)


@@ -0,0 +1,116 @@
# PR Title Convention
We have very precise rules about how Pull Requests (to the `master` branch) must be formatted. This format largely follows the [Angular Commit Message Convention](https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit). It leads to an easier-to-read commit history and allows for automated generation of release notes:
A PR title consists of these elements:
```text
<type>(<scope>): <summary>
│ │ │
│ │ └─⫸ Summary: In imperative present tense.
| | Capitalized
| | No period at the end.
│ │
│ └─⫸ Scope: API | benchmark | core | editor | * Node
└─⫸ Type: build | ci | chore | docs | feat | fix | perf | refactor | test
```
- PR title
- type
- scope (_optional_)
- summary
- PR description
- body (optional)
- blank line
- footer (optional)
The structure looks like this:
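(a minimal sketch, reusing the placeholder names from the lists above)
```text
<type>(<scope>): <summary>
<BLANK LINE>
<body>
<BLANK LINE>
<footer>
```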
## Type
Must be one of the following:
| type | description | appears in changelog |
| --- | --- | --- |
| `feat` | A new feature | ✅ |
| `fix` | A bug fix | ✅ |
| `perf` | A code change that improves performance | ✅ |
| `test` | Adding missing tests or correcting existing tests | ❌ |
| `docs` | Documentation only changes | ❌ |
| `refactor` | A behavior-neutral code change that neither fixes a bug nor adds a feature | ❌ |
| `build` | Changes that affect the build system or external dependencies (TypeScript, Jest, pnpm, etc.) | ❌ |
| `ci` | Changes to CI configuration files and scripts (e.g. Github actions) | ❌ |
| `chore` | Routine tasks, maintenance, and minor updates not covered by other types | ❌ |
> BREAKING CHANGES (see Footer section below) will **always** appear in the changelog unless suffixed with `no-changelog`.
## Scope (optional)
The scope should specify the place of the commit change as long as the commit clearly addresses one of the following supported scopes. (Otherwise, omit the scope!)
- `API` - changes to the _public_ API
- `benchmark` - changes to the benchmark cli
- `core` - changes to the core / private API / backend of n8n
- `editor` - changes to the Editor UI
- `* Node` - changes to a specific node or trigger node (”`*`” to be replaced with the node name, not its display name), e.g.
- mattermost → Mattermost Node
- microsoftToDo → Microsoft To Do Node
- n8n → n8n Node
## Summary
The summary contains a succinct description of the change (see the examples after this list):
- use the imperative, present tense: "change" not "changed" nor "changes"
- capitalize the first letter
- _no_ dot (.) at the end
- do _not_ include Linear ticket IDs etc. (e.g. N8N-1234)
- suffix with “(no-changelog)” for commits / PRs that should not get mentioned in the changelog.
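For example, two hypothetical titles that follow these rules (the scopes are from the list above; the changes themselves are invented for illustration):
```text
feat(core): Add retry option for failed executions
fix(editor): Correct tooltip position in node settings (no-changelog)
```
The second title carries the `(no-changelog)` suffix, so it would be kept out of the generated changelog.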
## Body (optional)
Just as in the **summary**, use the imperative, present tense: "change" not "changed" nor "changes". The body should include the motivation for the change and contrast this with previous behavior.
## Footer (optional)
The footer can contain information about breaking changes and deprecations and is also the place to [reference GitHub issues](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword), Linear tickets, and other PRs that this commit closes or is related to. For example:
```text
BREAKING CHANGE: <breaking change summary>
<BLANK LINE>
<breaking change description + migration instructions>
<BLANK LINE>
<BLANK LINE>
Fixes #<issue number>
```
or
```text
DEPRECATED: <what is deprecated>
<BLANK LINE>
<deprecation description + recommended update path>
<BLANK LINE>
<BLANK LINE>
Closes #<pr number>
```
A Breaking Change section should start with the phrase "`BREAKING CHANGE:` " followed by a summary of the breaking change, a blank line, and a detailed description of the breaking change that also includes migration instructions.
> 💡 A breaking change can additionally also be marked by adding a “`!`” to the header, right before the “`:`”, e.g. `feat(editor)!: Remove support for dark mode`
>
> This makes locating breaking changes easier when just skimming through commit messages.
> 💡 The breaking changes must also be added to the [packages/cli/BREAKING-CHANGES.md](https://github.com/n8n-io/n8n/blob/master/packages/cli/BREAKING-CHANGES.md) file located in the n8n repository.
Similarly, a Deprecation section should start with "`DEPRECATED:` " followed by a short description of what is deprecated, a blank line, and a detailed description of the deprecation that also mentions the recommended update path.
### Revert commits
If the commit reverts a previous commit, it should begin with `revert:` , followed by the header of the reverted commit.
The content of the commit message body should contain (see the example after this list):
- information about the SHA of the commit being reverted in the following format: `This reverts commit <SHA>`,
- a clear description of the reason for reverting the commit.
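A hypothetical example, with an invented header, SHA, and reason:
```text
revert: feat(editor): Add canvas zoom shortcuts
<BLANK LINE>
This reverts commit 0a1b2c3d4e5f67890123456789abcdef01234567.
Reverting because the shortcuts conflicted with existing browser keybindings.
```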


@@ -0,0 +1,55 @@
import semver from 'semver';
import { writeFile, readFile } from 'fs/promises';
import { resolve } from 'path';
import child_process from 'child_process';
import { promisify } from 'util';
import assert from 'assert';
const exec = promisify(child_process.exec);
const rootDir = process.cwd();
const releaseType = process.env.RELEASE_TYPE;
assert.match(releaseType, /^(patch|minor|major)$/, 'Invalid RELEASE_TYPE');
// TODO: if releaseType is `auto` determine release type based on the changelog
const lastTag = (await exec('git describe --tags --match "n8n@*" --abbrev=0')).stdout.trim();
const packages = JSON.parse((await exec('pnpm ls -r --only-projects --json')).stdout);
const packageMap = {};
for (let { name, path, version, private: isPrivate, dependencies } of packages) {
if (isPrivate && path !== rootDir) continue;
if (path === rootDir) name = 'monorepo-root';
const isDirty = await exec(`git diff --quiet HEAD ${lastTag} -- ${path}`)
.then(() => false)
.catch((error) => true);
packageMap[name] = { path, isDirty, version };
}
assert.ok(
Object.values(packageMap).some(({ isDirty }) => isDirty),
'No changes found since the last release',
);
// Keep the monorepo version up to date with the released version
packageMap['monorepo-root'].version = packageMap['n8n'].version;
for (const packageName in packageMap) {
const { path, version, isDirty } = packageMap[packageName];
const packageFile = resolve(path, 'package.json');
const packageJson = JSON.parse(await readFile(packageFile, 'utf-8'));
packageJson.version = packageMap[packageName].nextVersion =
isDirty ||
Object.keys(packageJson.dependencies || {}).some(
(dependencyName) => packageMap[dependencyName]?.isDirty,
)
? semver.inc(version, releaseType)
: version;
await writeFile(packageFile, JSON.stringify(packageJson, null, 2) + '\n');
}
console.log(packageMap['n8n'].nextVersion);


@@ -0,0 +1,44 @@
import { writeFile, readFile, copyFile } from 'fs/promises';
import { resolve, dirname } from 'path';
import child_process from 'child_process';
import { fileURLToPath } from 'url';
import { promisify } from 'util';
const exec = promisify(child_process.exec);
const commonFiles = ['LICENSE.md', 'LICENSE_EE.md'];
const baseDir = resolve(dirname(fileURLToPath(import.meta.url)), '../..');
const packages = JSON.parse((await exec('pnpm ls -r --only-projects --json')).stdout);
for (let { name, path, version, private: isPrivate } of packages) {
if (isPrivate) continue;
const packageFile = resolve(path, 'package.json');
const packageJson = {
...JSON.parse(await readFile(packageFile, 'utf-8')),
// Add these fields to all published package.json files to ensure provenance checks pass
license: 'SEE LICENSE IN LICENSE.md',
homepage: 'https://n8n.io',
author: {
name: 'Jan Oberhauser',
email: 'jan@n8n.io',
},
repository: {
type: 'git',
url: 'git+https://github.com/n8n-io/n8n.git',
},
};
// Copy over LICENSE.md and LICENSE_EE.md into every published package, and ensure they get included in the published package
await Promise.all(
commonFiles.map(async (file) => {
await copyFile(resolve(baseDir, file), resolve(path, file));
if (packageJson.files && !packageJson.files.includes(file)) {
packageJson.files.push(file);
}
}),
);
await writeFile(packageFile, JSON.stringify(packageJson, null, 2) + '\n');
}

n8n-n8n-1.109.2/.github/scripts/package.json

@@ -0,0 +1,12 @@
{
"dependencies": {
"cacheable-lookup": "6.1.0",
"conventional-changelog": "^4.0.0",
"debug": "4.3.4",
"glob": "10.3.10",
"p-limit": "3.1.0",
"picocolors": "1.0.1",
"semver": "7.5.4",
"tempfile": "5.0.0"
}
}


@@ -0,0 +1,18 @@
const { writeFileSync } = require('fs');
const { resolve } = require('path');
const baseDir = resolve(__dirname, '../..');
const trimPackageJson = (packageName) => {
const filePath = resolve(baseDir, 'packages', packageName, 'package.json');
const { scripts, peerDependencies, devDependencies, dependencies, ...packageJson } = require(
filePath,
);
if (packageName === 'frontend/@n8n/chat') {
packageJson.dependencies = dependencies;
}
writeFileSync(filePath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
};
trimPackageJson('frontend/@n8n/chat');
trimPackageJson('frontend/@n8n/design-system');
trimPackageJson('frontend/editor-ui');


@@ -0,0 +1,39 @@
import createTempFile from 'tempfile';
import conventionalChangelog from 'conventional-changelog';
import { resolve } from 'path';
import { createReadStream, createWriteStream } from 'fs';
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import { pipeline } from 'stream/promises';
import packageJson from '../../package.json' with { type: 'json' };
const baseDir = resolve(dirname(fileURLToPath(import.meta.url)), '../..');
const fullChangelogFile = resolve(baseDir, 'CHANGELOG.md');
const versionChangelogFile = resolve(baseDir, `CHANGELOG-${packageJson.version}.md`);
const changelogStream = conventionalChangelog({
preset: 'angular',
releaseCount: 1,
tagPrefix: 'n8n@',
transform: (commit, callback) => {
const hasNoChangelogInHeader = commit.header.includes('(no-changelog)');
const isBenchmarkScope = commit.scope === 'benchmark';
// Ignore commits that have 'benchmark' scope or '(no-changelog)' in the header
callback(null, hasNoChangelogInHeader || isBenchmarkScope ? undefined : commit);
},
}).on('error', (err) => {
console.error(err.stack);
process.exit(1);
});
// Write the new changelog to a new temporary file, so that the contents can be used in the PR description
await pipeline(changelogStream, createWriteStream(versionChangelogFile));
// Since we can't read and write from the same file at the same time,
// we use a temporary file to output the updated changelog to.
const tmpFile = createTempFile();
const tmpStream = createWriteStream(tmpFile);
await pipeline(createReadStream(versionChangelogFile), tmpStream, { end: false });
await pipeline(createReadStream(fullChangelogFile), tmpStream);
await pipeline(createReadStream(tmpFile), createWriteStream(fullChangelogFile));


@@ -0,0 +1,90 @@
#!/usr/bin/env node
const packages = ['nodes-base', '@n8n/nodes-langchain'];
const concurrency = 20;
let exitCode = 0;
const debug = require('debug')('n8n');
const path = require('path');
const https = require('https');
const glob = require('glob');
const pLimit = require('p-limit');
const picocolors = require('picocolors');
const Lookup = require('cacheable-lookup').default;
const agent = new https.Agent({ keepAlive: true, keepAliveMsecs: 5000 });
new Lookup().install(agent);
const limiter = pLimit(concurrency);
const validateUrl = async (packageName, kind, type) =>
new Promise((resolve, reject) => {
const name = type.displayName;
const documentationUrl =
kind === 'credentials'
? type.documentationUrl
: type.codex?.resources?.primaryDocumentation?.[0]?.url;
if (!documentationUrl) resolve([name, null]);
const url = new URL(
/^https?:\/\//.test(documentationUrl)
? documentationUrl
: `https://docs.n8n.io/integrations/builtin/${kind}/${documentationUrl.toLowerCase()}/`,
);
https
.request(
{
hostname: url.hostname,
port: 443,
path: url.pathname,
method: 'HEAD',
agent,
},
(res) => {
debug(picocolors.green('✓'), packageName, kind, name);
resolve([name, res.statusCode]);
},
)
.on('error', (e) => {
debug(picocolors.red('✘'), packageName, kind, name);
reject(e);
})
.end();
});
const checkLinks = async (packageName, kind) => {
const baseDir = path.resolve(__dirname, '../../packages', packageName);
let types = require(path.join(baseDir, `dist/types/${kind}.json`));
if (kind === 'nodes')
types = types.filter(
({ codex, hidden }) => !!codex?.resources?.primaryDocumentation && !hidden,
);
debug(packageName, kind, types.length);
const statuses = await Promise.all(
types.map((type) =>
limiter(() => {
return validateUrl(packageName, kind, type);
}),
),
);
const missingDocs = [];
const invalidUrls = [];
for (const [name, statusCode] of statuses) {
if (statusCode === null) missingDocs.push(name);
if (statusCode !== 200) invalidUrls.push(name);
}
if (missingDocs.length)
console.log('Documentation URL missing in %s for %s', packageName, kind, missingDocs);
if (invalidUrls.length)
console.log('Documentation URL invalid in %s for %s', packageName, kind, invalidUrls);
if (missingDocs.length || invalidUrls.length) exitCode = 1;
};
(async () => {
for (const packageName of packages) {
await Promise.all([checkLinks(packageName, 'credentials'), checkLinks(packageName, 'nodes')]);
if (exitCode !== 0) process.exit(exitCode);
}
})();


@@ -0,0 +1,46 @@
name: Destroy Benchmark Env
on:
schedule:
- cron: '0 5 * * *'
workflow_dispatch:
permissions:
id-token: write
contents: read
concurrency:
group: benchmark
cancel-in-progress: false
jobs:
build:
runs-on: ubuntu-latest
environment: benchmarking
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Azure login
uses: azure/login@6c251865b4e6290e7b78be643ea2d005bc51f69a # v2.1.1
with:
client-id: ${{ secrets.BENCHMARK_ARM_CLIENT_ID }}
tenant-id: ${{ secrets.BENCHMARK_ARM_TENANT_ID }}
subscription-id: ${{ secrets.BENCHMARK_ARM_SUBSCRIPTION_ID }}
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- name: Setup corepack and pnpm
run: |
npm i -g corepack@0.33
corepack enable
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Destroy cloud env
run: pnpm destroy-cloud-env
working-directory: packages/@n8n/benchmark


@@ -0,0 +1,104 @@
name: Run Nightly Benchmark
run-name: Benchmark ${{ inputs.n8n_tag || 'nightly' }}
on:
schedule:
- cron: '30 1,2,3 * * *'
workflow_dispatch:
inputs:
debug:
description: 'Use debug logging'
required: true
default: 'false'
n8n_tag:
description: 'Name of the n8n docker tag to run the benchmark against.'
required: true
default: 'nightly'
benchmark_tag:
description: 'Name of the benchmark cli docker tag to run the benchmark with.'
required: true
default: 'latest'
env:
ARM_CLIENT_ID: ${{ secrets.BENCHMARK_ARM_CLIENT_ID }}
ARM_SUBSCRIPTION_ID: ${{ secrets.BENCHMARK_ARM_SUBSCRIPTION_ID }}
ARM_TENANT_ID: ${{ secrets.BENCHMARK_ARM_TENANT_ID }}
N8N_TAG: ${{ inputs.n8n_tag || 'nightly' }}
N8N_BENCHMARK_TAG: ${{ inputs.benchmark_tag || 'latest' }}
DEBUG: ${{ inputs.debug == 'true' && '--debug' || '' }}
permissions:
id-token: write
contents: read
concurrency:
group: benchmark
cancel-in-progress: false
jobs:
build:
runs-on: ubuntu-latest
environment: benchmarking
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3
with:
terraform_version: '1.8.5'
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- name: Setup corepack and pnpm
run: |
npm i -g corepack@0.33
corepack enable
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Azure login
uses: azure/login@6c251865b4e6290e7b78be643ea2d005bc51f69a # v2.1.1
with:
client-id: ${{ env.ARM_CLIENT_ID }}
tenant-id: ${{ env.ARM_TENANT_ID }}
subscription-id: ${{ env.ARM_SUBSCRIPTION_ID }}
- name: Destroy any existing environment
run: pnpm destroy-cloud-env
working-directory: packages/@n8n/benchmark
- name: Provision the environment
run: pnpm provision-cloud-env ${{ env.DEBUG }}
working-directory: packages/@n8n/benchmark
- name: Run the benchmark
env:
BENCHMARK_RESULT_WEBHOOK_URL: ${{ secrets.BENCHMARK_RESULT_WEBHOOK_URL }}
BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER: ${{ secrets.BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER }}
N8N_LICENSE_CERT: ${{ secrets.N8N_BENCHMARK_LICENSE_CERT }}
run: |
pnpm benchmark-in-cloud \
--vus 5 \
--duration 1m \
--n8nTag ${{ env.N8N_TAG }} \
--benchmarkTag ${{ env.N8N_BENCHMARK_TAG }} \
${{ env.DEBUG }}
working-directory: packages/@n8n/benchmark
# We need to login again because the access token expires
- name: Azure login
if: always()
uses: azure/login@6c251865b4e6290e7b78be643ea2d005bc51f69a # v2.1.1
with:
client-id: ${{ env.ARM_CLIENT_ID }}
tenant-id: ${{ env.ARM_TENANT_ID }}
subscription-id: ${{ env.ARM_SUBSCRIPTION_ID }}
- name: Destroy the environment
if: always()
run: pnpm destroy-cloud-env ${{ env.DEBUG }}
working-directory: packages/@n8n/benchmark


@@ -0,0 +1,37 @@
name: Check Documentation URLs
on:
release:
types: [published]
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
jobs:
check-docs-urls:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup Node.js
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
with:
build-command: turbo build --filter=*nodes*
- run: npm install --prefix=.github/scripts --no-package-lock
- name: Test URLs
run: node .github/scripts/validate-docs-links.js
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
with:
status: ${{ job.status }}
channel: '#alerts-build'
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
message: |
<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}| Documentation URLs check failed >


@@ -0,0 +1,20 @@
name: Check PR title
on:
pull_request:
types:
- opened
- edited
- synchronize
branches:
- 'master'
jobs:
check-pr-title:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Validate PR title
uses: n8n-io/validate-n8n-pull-request-title@c97ff722ac14ee0bda73766473bba764445db805 # v2.2.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -0,0 +1,111 @@
# Determines if conditions are met for running subsequent jobs on a Pull Request.
#
# !! IMPORTANT !!
# This workflow RELIES on being called from a parent workflow triggered by
# a `pull_request` or `pull_request_target` event. It uses `github.event`
# to access PR details.
#
# It checks if all the following conditions are TRUE:
# 1. The PR is NOT from a fork (i.e., it's an internal PR).
# 2. The PR has been approved by a maintainer (`is_pr_approved_by_maintainer`).
# 3. The PR's source branch does NOT match an excluded pattern.
# 4. The PR includes relevant file changes (`paths_filter_patterns`).
#
# It outputs `should_run` as 'true' if ALL conditions pass, 'false' otherwise.
name: PR Eligibility Check
on:
workflow_call:
inputs:
is_pr_approved_by_maintainer:
required: true
type: boolean
paths_filter_patterns:
description: "Path filter patterns for 'paths-filter-action'."
required: false
type: string
default: |
not_ignored:
- '!.devcontainer/**'
- '!.github/*'
- '!.github/scripts/*'
- '!.github/workflows/benchmark-*'
- '!.github/workflows/check-*'
- '!.vscode/**'
- '!docker/**'
- '!packages/@n8n/benchmark/**'
- '!packages/@n8n/task-runner-python/**'
- '!**/*.md'
excluded_source_branch_patterns:
description: 'Newline-separated list of glob patterns for source branches to EXCLUDE.'
required: false
type: string
default: |
release/*
master
outputs:
should_run:
description: "Outputs 'true' if all eligibility checks pass, otherwise 'false'."
value: ${{ jobs.evaluate_conditions.outputs.run_decision }}
jobs:
evaluate_conditions:
runs-on: ubuntu-latest
outputs:
run_decision: ${{ steps.evaluate.outputs.should_run }}
steps:
- name: Check out current commit
uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Determine changed files
uses: tomi/paths-filter-action@32c62f5ca100c1110406e3477d5b3ecef4666fec # v3.0.2
id: changed
with:
filters: ${{ inputs.paths_filter_patterns }}
predicate-quantifier: 'every'
- name: Evaluate Conditions & Set Output
id: evaluate
env:
IS_FORK: ${{ github.event.pull_request.head.repo.fork }}
IS_APPROVED: ${{ inputs.is_pr_approved_by_maintainer }}
FILES_CHANGED: ${{ steps.changed.outputs.not_ignored == 'true' }}
HEAD_REF: ${{ github.event.pull_request.head.ref }}
EXCLUDED_PATTERNS: ${{ inputs.excluded_source_branch_patterns }}
run: |
if [[ "$IS_FORK" == "true" ]]; then
is_community="true"
else
is_community="false"
fi
source_branch_excluded="false"
while IFS= read -r pattern; do
# shellcheck disable=SC2053
if [[ -n "$pattern" && "$HEAD_REF" == $pattern ]]; then
source_branch_excluded="true"
break
fi
done <<< "$EXCLUDED_PATTERNS"
echo "--- Checking Conditions ---"
echo "Is NOT Community PR: $([[ "$is_community" == "false" ]] && echo true || echo false)"
echo "Files Changed: $FILES_CHANGED"
echo "Source Branch Excluded: $source_branch_excluded"
echo "Is Approved: $IS_APPROVED"
echo "-------------------------"
if [[ "$is_community" == "false" && \
"$FILES_CHANGED" == "true" && \
"$source_branch_excluded" == "false" && \
"$IS_APPROVED" == "true" ]]; then
echo "Decision: Conditions met. Setting should_run=true."
echo "should_run=true" >> "$GITHUB_OUTPUT"
else
echo "Decision: Conditions not met. Setting should_run=false."
echo "should_run=false" >> "$GITHUB_OUTPUT"
fi


@@ -0,0 +1,98 @@
name: Chromatic
on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
pull_request_review:
types: [submitted]
concurrency:
group: chromatic-${{ github.event.pull_request.number || github.ref }}-${{github.event.review.state}}
cancel-in-progress: true
jobs:
get-metadata:
name: Get Metadata
runs-on: ubuntu-latest
if: github.event.review.state == 'approved'
steps:
- name: Check out current commit
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 2
- name: Determine changed files
uses: tomi/paths-filter-action@32c62f5ca100c1110406e3477d5b3ecef4666fec # v3.0.2
id: changed
if: github.event_name == 'pull_request_review'
with:
filters: |
design_system:
- packages/design-system/**
- .github/workflows/chromatic.yml
outputs:
design_system_files_changed: ${{ steps.changed.outputs.design_system == 'true' }}
is_community_pr: ${{ contains(github.event.pull_request.labels.*.name, 'community') }}
is_pr_target_master: ${{ github.event.pull_request.base.ref == 'master' }}
is_dispatch: ${{ github.event_name == 'workflow_dispatch' }}
is_pr_approved: ${{ github.event.review.state == 'approved' }}
chromatic:
needs: [get-metadata]
if: |
needs.get-metadata.outputs.is_dispatch == 'true' ||
(
needs.get-metadata.outputs.design_system_files_changed == 'true' &&
needs.get-metadata.outputs.is_community_pr == 'false' &&
needs.get-metadata.outputs.is_pr_target_master == 'true' &&
needs.get-metadata.outputs.is_pr_approved == 'true'
)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 0
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- name: Setup corepack and pnpm
run: |
npm i -g corepack@0.33
corepack enable
- run: pnpm install --frozen-lockfile
- name: Publish to Chromatic
uses: chromaui/action@1cfa065cbdab28f6ca3afaeb3d761383076a35aa # v11
id: chromatic_tests
continue-on-error: true
with:
workingDir: packages/design-system
onlyChanged: true
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
exitZeroOnChanges: false
- name: Success comment
if: steps.chromatic_tests.outcome == 'success' && github.ref != 'refs/heads/master'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
token: ${{ secrets.GITHUB_TOKEN }}
edit-mode: replace
body: |
:white_check_mark: No visual regressions found.
- name: Fail comment
if: steps.chromatic_tests.outcome != 'success' && github.ref != 'refs/heads/master'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
token: ${{ secrets.GITHUB_TOKEN }}
edit-mode: replace
body: |
[:warning: Visual regressions found](${{steps.chromatic_tests.outputs.url}}): ${{steps.chromatic_tests.outputs.changeCount}}


@@ -0,0 +1,42 @@
name: Test Master
on:
push:
branches:
- master
paths-ignore:
- packages/@n8n/task-runner-python/**
jobs:
unit-test:
name: Unit tests
uses: ./.github/workflows/units-tests-reusable.yml
strategy:
matrix:
node-version: [20.x, 22.x, 24.3.x]
with:
ref: ${{ github.sha }}
nodeVersion: ${{ matrix.node-version }}
collectCoverage: ${{ matrix.node-version == '22.x' }}
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
lint:
name: Lint
uses: ./.github/workflows/linting-reusable.yml
with:
ref: ${{ github.sha }}
notify-on-failure:
name: Notify Slack on failure
runs-on: ubuntu-latest
needs: [unit-test, lint]
steps:
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
with:
status: ${{ job.status }}
channel: '#alerts-build'
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
message: Master branch (build or test or lint) failed (${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})


@@ -0,0 +1,146 @@
name: Test Postgres and MySQL schemas
on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
pull_request:
paths:
- packages/cli/src/databases/**
- packages/cli/src/modules/*/database/**
- packages/cli/src/modules/**/*.entity.ts
- packages/cli/src/modules/**/*.repository.ts
- packages/cli/test/integration/**
- packages/cli/test/shared/db/**
- packages/@n8n/db/**
- packages/cli/**/__tests__/**
- .github/workflows/ci-postgres-mysql.yml
- .github/docker-compose.yml
pull_request_review:
types: [submitted]
concurrency:
group: db-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: false
env:
NODE_OPTIONS: '--max-old-space-size=3072'
jobs:
build:
name: Install & Build
runs-on: blacksmith-2vcpu-ubuntu-2204
if: github.event_name != 'pull_request_review' || startsWith(github.event.pull_request.base.ref, 'release/')
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
sqlite-pooled:
name: SQLite Pooled
needs: build
runs-on: blacksmith-2vcpu-ubuntu-2204
timeout-minutes: 20
env:
DB_TYPE: sqlite
DB_SQLITE_POOL_SIZE: 4
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Test SQLite Pooled
working-directory: packages/cli
run: pnpm jest
mariadb:
name: MariaDB
needs: build
runs-on: blacksmith-2vcpu-ubuntu-2204
timeout-minutes: 20
env:
DB_MYSQLDB_PASSWORD: password
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Start MariaDB
uses: isbang/compose-action@802a148945af6399a338c7906c267331b39a71af # v2.0.0
with:
compose-file: ./.github/docker-compose.yml
services: |
mariadb
- name: Test MariaDB
working-directory: packages/cli
run: pnpm test:mariadb --testTimeout 120000
mysql:
name: MySQL (${{ matrix.service-name }})
needs: build
runs-on: blacksmith-2vcpu-ubuntu-2204
timeout-minutes: 20
strategy:
matrix:
service-name: ['mysql-8.0.13', 'mysql-8.4']
env:
DB_MYSQLDB_PASSWORD: password
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Start MySQL
uses: isbang/compose-action@802a148945af6399a338c7906c267331b39a71af # v2.0.0
with:
compose-file: ./.github/docker-compose.yml
services: |
${{ matrix.service-name }}
- name: Test MySQL
working-directory: packages/cli
run: pnpm test:mysql --testTimeout 120000
postgres:
name: Postgres
needs: build
runs-on: blacksmith-2vcpu-ubuntu-2204
timeout-minutes: 20
env:
DB_POSTGRESDB_PASSWORD: password
DB_POSTGRESDB_POOL_SIZE: 1 # Detect connection pooling deadlocks
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Start Postgres
uses: isbang/compose-action@802a148945af6399a338c7906c267331b39a71af # v2.0.0
with:
compose-file: ./.github/docker-compose.yml
services: |
postgres
- name: Test Postgres
working-directory: packages/cli
run: pnpm test:postgres
notify-on-failure:
name: Notify Slack on failure
runs-on: ubuntu-latest
needs: [mariadb, postgres, mysql]
steps:
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure() && github.ref == 'refs/heads/master'
with:
status: ${{ job.status }}
channel: '#alerts-build'
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
message: Postgres, MariaDB or MySQL tests failed (${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})


@@ -0,0 +1,106 @@
name: Build, unit test and lint branch
on:
pull_request:
branches:
- '**'
- '!release/*'
jobs:
install-and-build:
name: Install & Build
runs-on: blacksmith-2vcpu-ubuntu-2204
env:
NODE_OPTIONS: '--max-old-space-size=3072'
outputs:
frontend_changed: ${{ steps.paths-filter.outputs.frontend == 'true' }}
non_python_changed: ${{ steps.paths-filter.outputs.non-python == 'true' }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- name: Check for frontend changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: paths-filter
with:
filters: |
frontend:
- packages/frontend/**
- packages/@n8n/design-system/**
- packages/@n8n/chat/**
- packages/@n8n/codemirror-lang/**
- .bundlemonrc.json
- .github/workflows/ci-pull-requests.yml
non-python:
- '**'
- '!packages/@n8n/task-runner-python/**'
- name: Setup and Build
if: steps.paths-filter.outputs.non-python == 'true'
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Run format check
if: steps.paths-filter.outputs.non-python == 'true'
run: pnpm format:check
- name: Run typecheck
if: steps.paths-filter.outputs.non-python == 'true'
run: pnpm typecheck
- name: Upload Frontend Build Artifacts
if: steps.paths-filter.outputs.frontend == 'true'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: editor-ui-dist
path: packages/frontend/editor-ui/dist/
retention-days: 1
bundle-size-check:
name: Bundle Size Check
needs: install-and-build
if: needs.install-and-build.outputs.frontend_changed == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- name: Setup pnpm CLI
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: '22.x'
cache: pnpm
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Download Frontend Build Artifacts
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: editor-ui-dist
path: packages/frontend/editor-ui/dist/
- name: BundleMon
uses: lironer/bundlemon-action@cadbdd58f86faf1900725ef69d455444124b3748 # v1.3.0
unit-test:
name: Unit tests
if: needs.install-and-build.outputs.non_python_changed == 'true'
uses: ./.github/workflows/units-tests-reusable.yml
needs: install-and-build
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
collectCoverage: true
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
lint:
name: Lint
if: needs.install-and-build.outputs.non_python_changed == 'true'
uses: ./.github/workflows/linting-reusable.yml
needs: install-and-build
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge


@@ -0,0 +1,49 @@
name: Python CI
on:
pull_request:
paths:
- packages/@n8n/task-runner-python/**
- .github/workflows/ci-python.yml
push:
paths:
- packages/@n8n/task-runner-python/**
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/@n8n/task-runner-python
steps:
- name: Check out project
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install uv
uses: astral-sh/setup-uv@d9e0f98d3fc6adb07d1e3d37f3043649ddad06a1 # 6.5.0
with:
enable-cache: true
- name: Install Python
run: uv python install 3.13
- name: Install project dependencies
run: uv sync
- name: Format check
run: uv run ruff format --check
- name: Typecheck
run: uv run ty check src/
- name: Lint
run: uv run ruff check
unit-test:
name: Unit tests
runs-on: ubuntu-latest
needs: lint
steps:
- name: Python unit tests
run: echo "Skipping unit tests for Python-only changes until we have Python unit tests"

n8n-n8n-1.109.2/.github/workflows/claude.yml

@@ -0,0 +1,48 @@
name: Claude PR Assistant
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude-code-action:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: read
issues: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude PR Action
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
# Or use OAuth token instead:
# claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
timeout_minutes: '60'
# mode: tag # Default: responds to @claude mentions
# Optional: Restrict network access to specific domains only
# experimental_allowed_domains: |
# .anthropic.com
# .github.com
# api.github.com
# .githubusercontent.com
# bun.sh
# registry.npmjs.org
# .blob.core.windows.net


@@ -0,0 +1,59 @@
name: Docker Base Image CI
on:
push:
branches:
- master
paths:
- 'docker/images/n8n-base/Dockerfile'
pull_request:
paths:
- 'docker/images/n8n-base/Dockerfile'
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node_version: ['20', '22', '24']
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Login to GitHub Container Registry
if: github.event_name == 'push'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
file: ./docker/images/n8n-base/Dockerfile
build-args: |
NODE_VERSION=${{ matrix.node_version }}
platforms: linux/amd64,linux/arm64
provenance: ${{ github.event_name == 'push' }}
sbom: ${{ github.event_name == 'push' }}
push: ${{ github.event_name == 'push' }}
tags: |
${{ secrets.DOCKER_USERNAME }}/base:${{ matrix.node_version }}-${{ github.sha }}
${{ secrets.DOCKER_USERNAME }}/base:${{ matrix.node_version }}
ghcr.io/${{ github.repository_owner }}/base:${{ matrix.node_version }}-${{ github.sha }}
ghcr.io/${{ github.repository_owner }}/base:${{ matrix.node_version }}
cache-from: type=gha
cache-to: type=gha,mode=max


@@ -0,0 +1,414 @@
# This workflow is used to build and push the Docker image for n8n
# - determine-build-context: Determines what needs to be built based on the trigger
# - build-and-push-docker: This builds on both an ARM64 and AMD64 runner so the builds are native to the platform. Uses blacksmith native runners and build-push-action
# - create_multi_arch_manifest: This creates the multi-arch manifest for the Docker image. Needed to recombine the images from the build-and-push-docker job since they are separate runners.
# - security-scan: This scans the Docker image for security vulnerabilities using Trivy.
name: 'Docker: Build and Push'
env:
NODE_OPTIONS: '--max-old-space-size=7168'
on:
schedule:
- cron: '0 0 * * *'
workflow_call:
inputs:
n8n_version:
description: 'N8N version to build'
required: true
type: string
release_type:
description: 'Release type (stable, nightly, dev)'
required: false
type: string
default: 'stable'
push_enabled:
description: 'Whether to push the built images'
required: false
type: boolean
default: true
workflow_dispatch:
inputs:
push_enabled:
description: 'Push image to registry'
required: false
type: boolean
default: true
success_url:
description: 'URL to call after the build is successful'
required: false
type: string
pull_request:
types:
- opened
- ready_for_review
paths:
- '.github/workflows/docker-build-push.yml'
- 'docker/images/n8n/Dockerfile'
jobs:
determine-build-context:
name: Determine Build Context
runs-on: ubuntu-latest
outputs:
release_type: ${{ steps.context.outputs.release_type }}
n8n_version: ${{ steps.context.outputs.n8n_version }}
push_enabled: ${{ steps.context.outputs.push_enabled }}
build_matrix: ${{ steps.matrix.outputs.matrix }}
steps:
- name: Determine build context values
id: context
run: |
# Debug info
echo "Event: ${{ github.event_name }}"
echo "Ref: ${{ github.ref }}"
echo "Ref Name: ${{ github.ref_name }}"
# Check if called by another workflow (has n8n_version input)
if [[ -n "${{ inputs.n8n_version }}" ]]; then
# workflow_call - used for releases
{
echo "release_type=${{ inputs.release_type }}"
echo "n8n_version=${{ inputs.n8n_version }}"
echo "push_enabled=${{ inputs.push_enabled }}"
} >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
# Nightly builds
{
echo "release_type=nightly"
echo "n8n_version=snapshot"
echo "push_enabled=true"
} >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
# Build branches for Nathan deploy
BRANCH_NAME="${{ github.ref_name }}"
# Fallback to parsing ref if ref_name is empty
if [[ -z "$BRANCH_NAME" ]] && [[ "${{ github.ref }}" =~ ^refs/heads/(.+)$ ]]; then
BRANCH_NAME="${BASH_REMATCH[1]}"
fi
# Sanitize branch name for Docker tag
SAFE_BRANCH_NAME=$(echo "$BRANCH_NAME" | tr '/' '-' | tr -cd '[:alnum:]-_')
if [[ -z "$SAFE_BRANCH_NAME" ]]; then
echo "Error: Could not determine valid branch name"
exit 1
fi
{
echo "release_type=branch"
echo "n8n_version=branch-${SAFE_BRANCH_NAME}"
echo "push_enabled=${{ inputs.push_enabled }}"
} >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Direct PR triggers for testing Dockerfile changes
{
echo "release_type=dev"
echo "n8n_version=pr-${{ github.event.pull_request.number }}"
echo "push_enabled=false"
} >> "$GITHUB_OUTPUT"
fi
# Output summary for logs
echo "=== Build Context Summary ==="
echo "Release type: $(grep release_type "$GITHUB_OUTPUT" | cut -d= -f2)"
echo "N8N version: $(grep n8n_version "$GITHUB_OUTPUT" | cut -d= -f2)"
echo "Push enabled: $(grep push_enabled "$GITHUB_OUTPUT" | cut -d= -f2)"
- name: Determine build matrix
id: matrix
run: |
RELEASE_TYPE="${{ steps.context.outputs.release_type }}"
# Branch builds only need AMD64, everything else needs both platforms
if [[ "$RELEASE_TYPE" == "branch" ]]; then
MATRIX='{
"platform": ["amd64"],
"include": [{
"platform": "amd64",
"runner": "blacksmith-4vcpu-ubuntu-2204",
"docker_platform": "linux/amd64"
}]
}'
else
# All other builds (stable, nightly, dev, PR) need both platforms
MATRIX='{
"platform": ["amd64", "arm64"],
"include": [{
"platform": "amd64",
"runner": "blacksmith-4vcpu-ubuntu-2204",
"docker_platform": "linux/amd64"
}, {
"platform": "arm64",
"runner": "blacksmith-4vcpu-ubuntu-2204-arm",
"docker_platform": "linux/arm64"
}]
}'
fi
# Output matrix as single line for GITHUB_OUTPUT
echo "matrix=$(echo "$MATRIX" | jq -c .)" >> "$GITHUB_OUTPUT"
echo "Build matrix: $(echo "$MATRIX" | jq .)"
build-and-push-docker:
name: Build App, then Build and Push Docker Image (${{ matrix.platform }})
needs: determine-build-context
runs-on: ${{ matrix.runner }}
timeout-minutes: 15
strategy:
matrix: ${{ fromJSON(needs.determine-build-context.outputs.build_matrix) }}
outputs:
image_ref: ${{ steps.determine-tags.outputs.primary_ghcr_manifest_tag }}
primary_ghcr_manifest_tag: ${{ steps.determine-tags.outputs.primary_ghcr_manifest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Setup and Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
with:
build-command: pnpm build:n8n
- name: Determine Docker tags
id: determine-tags
run: |
RELEASE_TYPE="${{ needs.determine-build-context.outputs.release_type }}"
N8N_VERSION_TAG="${{ needs.determine-build-context.outputs.n8n_version }}"
GHCR_BASE="ghcr.io/${{ github.repository_owner }}/n8n"
DOCKER_BASE="${{ secrets.DOCKER_USERNAME }}/n8n"
PLATFORM="${{ matrix.platform }}"
GHCR_TAGS_FOR_PUSH=""
DOCKER_TAGS_FOR_PUSH=""
PRIMARY_GHCR_MANIFEST_TAG_VALUE=""
# Validate inputs
if [[ "$RELEASE_TYPE" == "stable" && -z "$N8N_VERSION_TAG" ]]; then
echo "Error: N8N_VERSION_TAG is empty for a stable release."
exit 1
fi
if [[ "$RELEASE_TYPE" == "branch" && -z "$N8N_VERSION_TAG" ]]; then
echo "Error: N8N_VERSION_TAG is empty for a branch release."
exit 1
fi
# Determine tags based on release type
case "$RELEASE_TYPE" in
"stable")
PRIMARY_GHCR_MANIFEST_TAG_VALUE="${GHCR_BASE}:${N8N_VERSION_TAG}"
GHCR_TAGS_FOR_PUSH="${PRIMARY_GHCR_MANIFEST_TAG_VALUE}-${PLATFORM}"
DOCKER_TAGS_FOR_PUSH="${DOCKER_BASE}:${N8N_VERSION_TAG}-${PLATFORM}"
;;
"nightly")
PRIMARY_GHCR_MANIFEST_TAG_VALUE="${GHCR_BASE}:nightly"
GHCR_TAGS_FOR_PUSH="${PRIMARY_GHCR_MANIFEST_TAG_VALUE}-${PLATFORM}"
DOCKER_TAGS_FOR_PUSH="${DOCKER_BASE}:nightly-${PLATFORM}"
;;
"branch")
PRIMARY_GHCR_MANIFEST_TAG_VALUE="${GHCR_BASE}:${N8N_VERSION_TAG}"
GHCR_TAGS_FOR_PUSH="${PRIMARY_GHCR_MANIFEST_TAG_VALUE}-${PLATFORM}"
# No Docker Hub tags for branch builds
DOCKER_TAGS_FOR_PUSH=""
;;
"dev"|*)
if [[ "$N8N_VERSION_TAG" == pr-* ]]; then
# PR builds only go to GHCR
PRIMARY_GHCR_MANIFEST_TAG_VALUE="${GHCR_BASE}:${N8N_VERSION_TAG}"
GHCR_TAGS_FOR_PUSH="${PRIMARY_GHCR_MANIFEST_TAG_VALUE}-${PLATFORM}"
DOCKER_TAGS_FOR_PUSH=""
else
# Regular dev builds go to both registries
PRIMARY_GHCR_MANIFEST_TAG_VALUE="${GHCR_BASE}:dev"
GHCR_TAGS_FOR_PUSH="${PRIMARY_GHCR_MANIFEST_TAG_VALUE}-${PLATFORM}"
DOCKER_TAGS_FOR_PUSH="${DOCKER_BASE}:dev-${PLATFORM}"
fi
;;
esac
# Combine all tags
ALL_TAGS="${GHCR_TAGS_FOR_PUSH}"
if [[ -n "$DOCKER_TAGS_FOR_PUSH" ]]; then
ALL_TAGS="${ALL_TAGS}\n${DOCKER_TAGS_FOR_PUSH}"
fi
echo "Generated Tags for push: $ALL_TAGS"
{
echo "tags<<EOF"
echo -e "$ALL_TAGS"
echo "EOF"
} >> "$GITHUB_OUTPUT"
{
echo "ghcr_platform_tag=${GHCR_TAGS_FOR_PUSH}"
echo "dockerhub_platform_tag=${DOCKER_TAGS_FOR_PUSH}"
} >> "$GITHUB_OUTPUT"
# Only output manifest tags from the first platform to avoid duplicates
if [[ "$PLATFORM" == "amd64" ]]; then
echo "primary_ghcr_manifest_tag=${PRIMARY_GHCR_MANIFEST_TAG_VALUE}" >> "$GITHUB_OUTPUT"
fi
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Login to GitHub Container Registry
if: needs.determine-build-context.outputs.push_enabled == 'true'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
if: needs.determine-build-context.outputs.push_enabled == 'true' && steps.determine-tags.outputs.dockerhub_platform_tag != ''
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push Docker image
uses: useblacksmith/build-push-action@574eb0ee0b59c6a687ace24192f0727dfb65d6d7 # v1.2
with:
context: .
file: ./docker/images/n8n/Dockerfile
build-args: |
NODE_VERSION=22
N8N_VERSION=${{ needs.determine-build-context.outputs.n8n_version }}
N8N_RELEASE_TYPE=${{ needs.determine-build-context.outputs.release_type }}
platforms: ${{ matrix.docker_platform }}
provenance: true
sbom: true
push: ${{ needs.determine-build-context.outputs.push_enabled == 'true' }}
tags: ${{ steps.determine-tags.outputs.tags }}
create_multi_arch_manifest:
name: Create Multi-Arch Manifest
needs: [determine-build-context, build-and-push-docker]
runs-on: ubuntu-latest
if: |
needs.build-and-push-docker.result == 'success' &&
needs.determine-build-context.outputs.push_enabled == 'true'
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Determine Docker Hub manifest tag
id: dockerhub_check
run: |
RELEASE_TYPE="${{ needs.determine-build-context.outputs.release_type }}"
N8N_VERSION="${{ needs.determine-build-context.outputs.n8n_version }}"
DOCKER_BASE="${{ secrets.DOCKER_USERNAME }}/n8n"
# Determine if Docker Hub manifest is needed and construct the tag
case "$RELEASE_TYPE" in
"stable")
{
echo "DOCKER_MANIFEST_TAG=${DOCKER_BASE}:${N8N_VERSION}"
echo "CREATE_DOCKERHUB_MANIFEST=true"
} >> "$GITHUB_OUTPUT"
;;
"nightly")
{
echo "DOCKER_MANIFEST_TAG=${DOCKER_BASE}:nightly"
echo "CREATE_DOCKERHUB_MANIFEST=true"
} >> "$GITHUB_OUTPUT"
;;
"dev")
if [[ "$N8N_VERSION" != pr-* ]]; then
{
echo "DOCKER_MANIFEST_TAG=${DOCKER_BASE}:dev"
echo "CREATE_DOCKERHUB_MANIFEST=true"
} >> "$GITHUB_OUTPUT"
else
echo "CREATE_DOCKERHUB_MANIFEST=false" >> "$GITHUB_OUTPUT"
fi
;;
*)
echo "CREATE_DOCKERHUB_MANIFEST=false" >> "$GITHUB_OUTPUT"
;;
esac
- name: Login to Docker Hub
if: steps.dockerhub_check.outputs.CREATE_DOCKERHUB_MANIFEST == 'true'
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Create GHCR multi-arch manifest
if: needs.build-and-push-docker.outputs.primary_ghcr_manifest_tag != ''
run: |
MANIFEST_TAG="${{ needs.build-and-push-docker.outputs.primary_ghcr_manifest_tag }}"
RELEASE_TYPE="${{ needs.determine-build-context.outputs.release_type }}"
echo "Creating GHCR manifest: $MANIFEST_TAG"
# For branch builds, only AMD64 is built
if [[ "$RELEASE_TYPE" == "branch" ]]; then
docker buildx imagetools create \
--tag $MANIFEST_TAG \
${MANIFEST_TAG}-amd64
else
docker buildx imagetools create \
--tag $MANIFEST_TAG \
${MANIFEST_TAG}-amd64 \
${MANIFEST_TAG}-arm64
fi
- name: Create Docker Hub multi-arch manifest
if: steps.dockerhub_check.outputs.CREATE_DOCKERHUB_MANIFEST == 'true'
run: |
MANIFEST_TAG="${{ steps.dockerhub_check.outputs.DOCKER_MANIFEST_TAG }}"
echo "Creating Docker Hub manifest: $MANIFEST_TAG"
docker buildx imagetools create \
--tag $MANIFEST_TAG \
${MANIFEST_TAG}-amd64 \
${MANIFEST_TAG}-arm64
call-success-url:
name: Call Success URL
needs: [create_multi_arch_manifest]
runs-on: ubuntu-latest
if: needs.create_multi_arch_manifest.result == 'success' || needs.create_multi_arch_manifest.result == 'skipped'
steps:
- name: Call Success URL
env:
SUCCESS_URL: ${{ github.event.inputs.success_url }}
if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.success_url != '' }}
run: |
echo "Calling success URL: ${{ env.SUCCESS_URL }}"
curl -v "${{ env.SUCCESS_URL }}" || echo "Failed to call success URL"
shell: bash
security-scan:
name: Security Scan
needs: [determine-build-context, build-and-push-docker]
if: |
success() &&
(needs.determine-build-context.outputs.release_type == 'stable' ||
needs.determine-build-context.outputs.release_type == 'nightly')
uses: ./.github/workflows/security-trivy-scan-callable.yml
with:
image_ref: ${{ needs.build-and-push-docker.outputs.image_ref }}
secrets: inherit

View File

@@ -0,0 +1,45 @@
name: Benchmark Docker Image CI
on:
workflow_dispatch:
push:
branches:
- master
paths:
- 'packages/@n8n/benchmark/**'
- 'pnpm-lock.yaml'
- 'pnpm-workspace.yaml'
- '.github/workflows/docker-images-benchmark.yml'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up QEMU
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build
uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: ./packages/@n8n/benchmark/Dockerfile
platforms: linux/amd64
provenance: false
push: true
tags: |
ghcr.io/${{ github.repository_owner }}/n8n-benchmark:latest

View File

@@ -0,0 +1,138 @@
name: Reusable e2e workflow
on:
workflow_call:
inputs:
branch:
description: 'GitHub branch to test.'
required: false
type: string
user:
description: 'User who kicked this off.'
required: false
type: string
default: 'schedule'
spec:
description: 'Specify specs.'
required: false
default: 'e2e/*'
type: string
record:
description: 'Record test run.'
required: false
default: true
type: boolean
parallel:
description: 'Run tests in parallel.'
required: false
default: true
type: boolean
containers:
description: 'Number of containers to run tests in.'
required: false
default: '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]'
type: string
pr_number:
description: 'PR number to run tests for.'
required: false
type: number
secrets:
CYPRESS_RECORD_KEY:
description: 'Cypress record key.'
required: true
CURRENTS_RECORD_KEY:
description: 'Currents record key.'
required: true
env:
NODE_OPTIONS: --max-old-space-size=3072
jobs:
testing:
runs-on: blacksmith-2vcpu-ubuntu-2204
outputs:
dashboardUrl: ${{ steps.cypress.outputs.dashboardUrl }}
strategy:
fail-fast: false
matrix:
# If spec is not e2e/* then we run only one container to prevent
# running the same tests multiple times
containers: ${{ fromJSON( inputs.spec == 'e2e/*' && inputs.containers || '[1]' ) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up and build
uses: ./.github/actions/setup-nodejs-blacksmith
- name: Install Cypress
working-directory: cypress
run: pnpm cypress:install
- name: Cypress run
id: cypress
uses: cypress-io/github-action@be1bab96b388bbd9ce3887e397d373c8557e15af # v6.9.2
with:
working-directory: cypress
install: false
start: pnpm start
wait-on: 'http://localhost:5678'
wait-on-timeout: 120
record: ${{ inputs.record }}
parallel: ${{ inputs.spec == 'e2e/*' && inputs.parallel || false }}
# We have to provide a custom ci-build-id key to make sure that this workflow can be run multiple times
# in the same parent workflow
ci-build-id: ${{ github.run_id }}-${{ github.run_attempt }}
spec: '${{ inputs.spec }}'
env:
NODE_OPTIONS: --dns-result-order=ipv4first
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
E2E_TESTS: true
COMMIT_INFO_MESSAGE: 🌳 ${{ inputs.branch }} 🤖 ${{ inputs.user }} 🗃️ ${{ inputs.spec }}
SHELL: /bin/sh
- name: Upload test results artifact
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: test-results-${{ matrix.containers }}
path: cypress/test-results-*.xml
upload-to-currents:
needs: testing
if: always()
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download all test results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
path: test-results
- name: Merge test results
run: |
npm install -g @currents/cmd junit-report-merger
# Merge all XML files, so Currents can show a single view for Cypress
jrm combined-results.xml "test-results/**/test-results-*.xml"
- name: Upload merged XML as artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: merged-junit-results
path: combined-results.xml
- name: Convert and upload to Currents
run: |
currents convert \
--input-format=junit \
--input-file=combined-results.xml \
--output-dir=.currents \
--framework=node \
--framework-version=cypress-14.4.0
currents upload \
--project-id=I0yzoc \
--key=${{ secrets.CURRENTS_RECORD_KEY }} \
--ci-build-id=n8n-io/n8n-${{ github.run_id }}-${{ github.run_attempt }} \
--report-dir=.currents \
--tag=cypress

View File

@@ -0,0 +1,44 @@
name: PR E2E
on:
pull_request_review:
types: [submitted]
concurrency:
group: e2e-${{ github.event.pull_request.number || github.ref }}-${{github.event.review.state}}
cancel-in-progress: true
jobs:
eligibility_check:
name: Check Eligibility for Test Run
if: github.event.review.state == 'approved'
uses: ./.github/workflows/check-run-eligibility.yml
with:
is_pr_approved_by_maintainer: true
run-e2e-tests:
name: E2E
uses: ./.github/workflows/e2e-reusable.yml
needs: [eligibility_check]
if: needs.eligibility_check.outputs.should_run == 'true'
with:
pr_number: ${{ github.event.pull_request.number }}
user: ${{ github.event.pull_request.user.login || 'PR User' }}
secrets: inherit
run-playwright-tests:
name: Playwright
uses: ./.github/workflows/playwright-test-reusable.yml
needs: [eligibility_check]
if: needs.eligibility_check.outputs.should_run == 'true'
secrets: inherit
post-e2e-tests:
name: E2E - Checks
runs-on: ubuntu-latest
needs: [eligibility_check, run-e2e-tests, run-playwright-tests]
if: always() && needs.eligibility_check.result != 'skipped'
steps:
- name: Fail if tests failed
if: needs.run-e2e-tests.result == 'failure' || needs.run-playwright-tests.result == 'failure'
run: exit 1

View File

@@ -0,0 +1,78 @@
name: End-to-End tests
run-name: E2E Tests ${{ inputs.branch }} - ${{ inputs.user }}
on:
schedule:
- cron: '0 3 * * *'
workflow_dispatch:
inputs:
branch:
description: 'GitHub branch to test.'
required: false
default: 'master'
spec:
description: 'Specify specs.'
required: false
default: 'e2e/*'
type: string
user:
description: 'User who kicked this off.'
required: false
default: 'schedule'
start-url:
description: 'URL to call after workflow is kicked off.'
required: false
default: ''
success-url:
description: 'URL to call after workflow is done.'
required: false
default: ''
jobs:
calls-start-url:
name: Calls start URL
runs-on: ubuntu-latest
if: ${{ github.event.inputs.start-url != '' }}
steps:
- name: Calls start URL
env:
START_URL: ${{ github.event.inputs.start-url }}
run: |
[[ "${{ env.START_URL }}" != "" ]] && curl -v -X POST -d 'url=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' "${{ env.START_URL }}" || echo ""
shell: bash
run-e2e-tests:
name: E2E
uses: ./.github/workflows/e2e-reusable.yml
with:
branch: ${{ github.event.inputs.branch || 'master' }}
user: ${{ github.event.inputs.user || 'PR User' }}
spec: ${{ github.event.inputs.spec || 'e2e/*' }}
secrets: inherit
run-playwright-tests:
name: Playwright
uses: ./.github/workflows/playwright-test-reusable.yml
secrets: inherit
calls-success-url-notify:
name: Calls success URL and notifies
runs-on: ubuntu-latest
needs: [run-e2e-tests, run-playwright-tests]
if: ${{ github.event.inputs.success-url != '' }}
steps:
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
with:
status: ${{ job.status }}
channel: '#alerts-build'
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
message: E2E failure for branch `${{ inputs.branch || 'master' }}` deployed by ${{ inputs.user || 'schedule' }} (${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
- name: Call Success URL - optionally
env:
SUCCESS_URL: ${{ github.event.inputs.success-url }}
run: |
[[ "${{ env.SUCCESS_URL }}" != "" ]] && curl -v "${{ env.SUCCESS_URL }}" || echo ""
shell: bash

View File

@@ -0,0 +1,33 @@
name: Reusable linting workflow
on:
workflow_call:
inputs:
ref:
description: GitHub ref to lint.
required: false
type: string
default: master
nodeVersion:
description: Version of node to use.
required: false
type: string
default: 22.x
env:
NODE_OPTIONS: --max-old-space-size=7168
jobs:
lint:
name: Lint
runs-on: blacksmith-4vcpu-ubuntu-2204
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ inputs.ref }}
- name: Build and Test
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
with:
build-command: pnpm lint
node-version: ${{ inputs.nodeVersion }}

View File

@@ -0,0 +1,27 @@
name: Notify PR status changed
on:
pull_request_review:
types: [submitted, dismissed]
pull_request:
types: [closed]
jobs:
notify:
runs-on: ubuntu-latest
if: >-
(github.event_name == 'pull_request_review' && github.event.review.state == 'approved') ||
(github.event_name == 'pull_request_review' && github.event.review.state == 'dismissed') ||
(github.event_name == 'pull_request' && github.event.pull_request.merged == true) ||
(github.event_name == 'pull_request' && github.event.pull_request.merged == false && github.event.action == 'closed')
steps:
- uses: fjogeleit/http-request-action@bf78da14118941f7e940279dd58f67e863cbeff6 # v1
if: ${{!contains(github.event.pull_request.labels.*.name, 'community')}}
name: Notify
env:
PR_URL: ${{ github.event.pull_request.html_url }}
with:
url: ${{ secrets.N8N_NOTIFY_PR_STATUS_CHANGED_URL }}
method: 'POST'
customHeaders: '{ "x-api-token": "${{ secrets.N8N_NOTIFY_PR_STATUS_CHANGED_TOKEN }}" }'
data: '{ "event_name": "${{ github.event_name }}", "pr_url": "${{ env.PR_URL }}", "event": ${{ toJSON(github.event) }} }'

View File

@@ -0,0 +1,13 @@
name: Run Playwright Tests (Docker Build)
# This workflow is used to run Playwright tests in a Docker container built from the current branch
on:
workflow_call:
workflow_dispatch:
jobs:
build-and-test:
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-build
secrets: inherit

View File

@@ -0,0 +1,37 @@
name: Run Playwright Tests (Docker Pull)
# This workflow is used to run Playwright tests in a Docker container pulled from the registry
on:
workflow_call:
inputs:
shards:
description: 'Shards for parallel execution'
required: false
default: '[1]'
type: string
image:
description: 'Image to use'
required: false
default: 'n8nio/n8n:nightly'
type: string
workflow_dispatch:
inputs:
shards:
description: 'Shards for parallel execution'
required: false
default: '[1]'
type: string
image:
description: 'Image to use'
required: false
default: 'n8nio/n8n:nightly'
type: string
jobs:
build-and-test:
uses: ./.github/workflows/playwright-test-reusable.yml
with:
test-mode: docker-pull
shards: ${{ inputs.shards }}
docker-image: ${{ inputs.image }}
secrets: inherit

View File

@@ -0,0 +1,77 @@
name: Playwright Tests - Reusable
on:
workflow_call:
inputs:
test-mode:
description: 'Test mode: local (pnpm start from local), docker-build, or docker-pull'
required: false
default: 'local'
type: string
shards:
description: 'Shards for parallel execution'
required: false
default: '[1, 2]'
type: string
docker-image:
description: 'Docker image to use (for docker-pull mode)'
required: false
default: 'n8nio/n8n:nightly'
type: string
secrets:
CURRENTS_RECORD_KEY:
required: true
env:
PLAYWRIGHT_BROWSERS_PATH: packages/testing/playwright/ms-playwright-cache
NODE_OPTIONS: --max-old-space-size=3072
# Disable Ryuk to avoid issues with Docker since it needs privileged access; containers are cleaned up on teardown anyway
TESTCONTAINERS_RYUK_DISABLED: true
PLAYWRIGHT_WORKERS: 3 # The runner has 2 CPUs, but more workers are fine because the tests are not CPU-intensive
jobs:
test:
runs-on: blacksmith-2vcpu-ubuntu-2204
strategy:
fail-fast: false
matrix:
shard: ${{ fromJSON(inputs.shards || '[1, 2]') }}
name: Test (Shard ${{ matrix.shard }}/${{ strategy.job-total }})
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 1
- name: Setup Environment
uses: ./.github/actions/setup-nodejs-blacksmith
with:
build-command: ${{ inputs.test-mode == 'docker-build' && 'pnpm build:docker' || 'pnpm turbo build:playwright' }}
enable-docker-cache: ${{ inputs.test-mode != 'local' }}
env:
INCLUDE_TEST_CONTROLLER: ${{ inputs.test-mode == 'docker-build' && 'true' || '' }}
- name: Install Browsers (Docker Build)
if: inputs.test-mode == 'docker-build'
run: pnpm turbo install-browsers:ci
- name: Run Tests (Local)
if: inputs.test-mode == 'local'
run: |
pnpm --filter=n8n-playwright test:local \
--shard=${{ matrix.shard }}/${{ strategy.job-total }} \
--workers=${{ env.PLAYWRIGHT_WORKERS }}
env:
CURRENTS_RECORD_KEY: ${{ secrets.CURRENTS_RECORD_KEY }}
- name: Run Tests (Docker)
if: inputs.test-mode != 'local'
run: |
pnpm --filter=n8n-playwright test:container:standard \
--shard=${{ matrix.shard }}/${{ strategy.job-total }} \
--workers=${{ env.PLAYWRIGHT_WORKERS }}
env:
N8N_DOCKER_IMAGE: ${{ inputs.test-mode == 'docker-build' && 'n8nio/n8n:local' || inputs.docker-image }}
CURRENTS_RECORD_KEY: ${{ secrets.CURRENTS_RECORD_KEY }}

View File

@@ -0,0 +1,73 @@
name: 'Release: Create Pull Request'
on:
workflow_dispatch:
inputs:
base-branch:
description: 'The branch, tag, or commit to create this release PR from.'
required: true
default: 'master'
release-type:
description: 'A SemVer release type.'
required: true
type: choice
default: 'minor'
options:
- patch
- minor
- major
jobs:
create-release-pr:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
timeout-minutes: 5
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 0
ref: ${{ github.event.inputs.base-branch }}
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- run: npm install --prefix=.github/scripts --no-package-lock
- name: Setup corepack and pnpm
run: |
npm i -g corepack@0.33
corepack enable
- name: Bump package versions
run: |
echo "NEXT_RELEASE=$(node .github/scripts/bump-versions.mjs)" >> "$GITHUB_ENV"
env:
RELEASE_TYPE: ${{ github.event.inputs.release-type }}
- name: Update Changelog
run: node .github/scripts/update-changelog.mjs
- name: Push the base branch
env:
BASE_BRANCH: ${{ github.event.inputs.base-branch }}
run: |
git push -f origin "refs/remotes/origin/${{ env.BASE_BRANCH }}:refs/heads/release/${{ env.NEXT_RELEASE }}"
- name: Push the release branch, and Create the PR
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
with:
base: 'release/${{ env.NEXT_RELEASE }}'
branch: 'release-pr/${{ env.NEXT_RELEASE }}'
commit-message: ':rocket: Release ${{ env.NEXT_RELEASE }}'
delete-branch: true
labels: release,release:${{ github.event.inputs.release-type }}
title: ':rocket: Release ${{ env.NEXT_RELEASE }}'
body-path: 'CHANGELOG-${{ env.NEXT_RELEASE }}.md'

View File

@@ -0,0 +1,168 @@
name: 'Release: Publish'
on:
pull_request:
types:
- closed
branches:
- 'release/*'
jobs:
publish-to-npm:
name: Publish to NPM
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 15
permissions:
id-token: write
env:
NPM_CONFIG_PROVENANCE: true
outputs:
release: ${{ steps.set-release.outputs.release }}
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 0
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- name: Setup corepack and pnpm
run: |
npm i -g corepack@0.33
corepack enable
- run: pnpm install --frozen-lockfile
- name: Set release version in env
run: echo "RELEASE=$(node -e 'console.log(require("./package.json").version)')" >> "$GITHUB_ENV"
- name: Build
run: pnpm build
- name: Cache build artifacts
uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-release:build
- name: Dry-run publishing
run: pnpm publish -r --no-git-checks --dry-run
- name: Pre publishing changes
run: |
echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
node .github/scripts/trim-fe-packageJson.js
node .github/scripts/ensure-provenance-fields.mjs
cp README.md packages/cli/README.md
sed -i "s/default: 'dev'/default: 'stable'/g" packages/cli/dist/config/schema.js
- name: Publish to NPM
run: pnpm publish -r --publish-branch ${{github.event.pull_request.base.ref}} --access public --tag rc --no-git-checks
- name: Cleanup rc tag
run: npm dist-tag rm n8n rc
continue-on-error: true
- id: set-release
run: echo "release=${{ env.RELEASE }}" >> "$GITHUB_OUTPUT"
publish-to-docker-hub:
name: Publish to DockerHub
needs: [publish-to-npm]
uses: ./.github/workflows/docker-build-push.yml
with:
n8n_version: ${{ needs.publish-to-npm.outputs.release }}
release_type: stable
secrets: inherit
create-github-release:
name: Create a GitHub Release
needs: [publish-to-npm, publish-to-docker-hub]
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 5
permissions:
contents: write
id-token: write
steps:
- name: Create a Release on GitHub
uses: ncipollo/release-action@bcfe5470707e8832e12347755757cec0eb3c22af # v1
with:
commit: ${{github.event.pull_request.base.ref}}
tag: 'n8n@${{ needs.publish-to-npm.outputs.release }}'
prerelease: true
makeLatest: false
body: ${{github.event.pull_request.body}}
create-sentry-release:
name: Create a Sentry Release
needs: [publish-to-npm, publish-to-docker-hub]
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 5
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Restore cached build artifacts
uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-release:build
- name: Create a frontend release
uses: getsentry/action-release@e769183448303de84c5a06aaaddf9da7be26d6c7 # v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_FRONTEND_PROJECT }}
version: n8n@${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/frontend/editor-ui/dist
- name: Create a backend release
uses: getsentry/action-release@e769183448303de84c5a06aaaddf9da7be26d6c7 # v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_BACKEND_PROJECT }}
version: n8n@${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/cli/dist packages/core/dist packages/nodes-base/dist packages/@n8n/n8n-nodes-langchain/dist
- name: Create a task runner release
uses: getsentry/action-release@e769183448303de84c5a06aaaddf9da7be26d6c7 # v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_TASK_RUNNER_PROJECT }}
version: n8n@${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/core/dist packages/workflow/dist/esm packages/@n8n/task-runner/dist
trigger-release-note:
name: Trigger a release note
needs: [publish-to-npm, create-github-release]
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
steps:
- name: Trigger a release note
run: curl -u docsWorkflows:${{ secrets.N8N_WEBHOOK_DOCS_PASSWORD }} --request GET 'https://internal.users.n8n.cloud/webhook/trigger-release-note' --header 'Content-Type:application/json' --data '{"version":"${{ needs.publish-to-npm.outputs.release }}"}'
# merge-back-into-master:
# name: Merge back into master
# needs: [publish-to-npm, create-github-release]
# if: ${{ github.event.pull_request.merged == true && !contains(github.event.pull_request.labels.*.name, 'release:patch') }}
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
# with:
# fetch-depth: 0
# - run: |
# git checkout --track origin/master
# git config user.name "github-actions[bot]"
# git config user.email 41898282+github-actions[bot]@users.noreply.github.com
# git merge --ff n8n@${{ needs.publish-to-npm.outputs.release }}
# git push origin master
# git push origin :${{github.event.pull_request.base.ref}}

View File

@@ -0,0 +1,121 @@
name: 'Release: Push to Channel'
on:
workflow_dispatch:
inputs:
version:
description: 'n8n Release version to push to a channel (e.g., 1.2.3 or 1.2.3-beta.4)'
required: true
type: string
release-channel:
description: 'Release channel'
required: true
type: choice
default: 'beta'
options:
- beta
- stable
jobs:
validate-inputs:
name: Validate Inputs
runs-on: ubuntu-latest
outputs:
version: ${{ steps.check_version.outputs.version }}
release_channel: ${{ github.event.inputs.release-channel }}
steps:
- name: Check Version Format
id: check_version
env:
INPUT_VERSION: ${{ github.event.inputs.version }}
run: |
input_version="${{ env.INPUT_VERSION }}"
version_regex='^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$'
if [[ "$input_version" =~ $version_regex ]]; then
echo "Version format is valid: $input_version"
echo "version=$input_version" >> "$GITHUB_OUTPUT"
else
echo "::error::Invalid version format provided: '$input_version'. Must match regex '$version_regex'."
exit 1
fi
release-to-npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: validate-inputs
timeout-minutes: 5
steps:
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: 22.x
- run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
- name: Add beta/next tags to NPM
if: needs.validate-inputs.outputs.release_channel == 'beta'
run: |
npm dist-tag add "n8n@${{ needs.validate-inputs.outputs.version }}" next
npm dist-tag add "n8n@${{ needs.validate-inputs.outputs.version }}" beta
- name: Add latest/stable tags to NPM
if: needs.validate-inputs.outputs.release_channel == 'stable'
run: |
npm dist-tag add "n8n@${{ needs.validate-inputs.outputs.version }}" latest
npm dist-tag add "n8n@${{ needs.validate-inputs.outputs.version }}" stable
release-to-docker-hub:
name: Release to DockerHub
runs-on: ubuntu-latest
needs: validate-inputs
timeout-minutes: 5
steps:
- uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Tag stable/latest Docker image
if: needs.validate-inputs.outputs.release_channel == 'stable'
run: |
docker buildx imagetools create -t "${{ secrets.DOCKER_USERNAME }}/n8n:stable" "${{ secrets.DOCKER_USERNAME }}/n8n:${{ needs.validate-inputs.outputs.version }}"
docker buildx imagetools create -t "${{ secrets.DOCKER_USERNAME }}/n8n:latest" "${{ secrets.DOCKER_USERNAME }}/n8n:${{ needs.validate-inputs.outputs.version }}"
- name: Tag beta/next Docker image
if: needs.validate-inputs.outputs.release_channel == 'beta'
run: |
docker buildx imagetools create -t "${{ secrets.DOCKER_USERNAME }}/n8n:beta" "${{ secrets.DOCKER_USERNAME }}/n8n:${{ needs.validate-inputs.outputs.version }}"
docker buildx imagetools create -t "${{ secrets.DOCKER_USERNAME }}/n8n:next" "${{ secrets.DOCKER_USERNAME }}/n8n:${{ needs.validate-inputs.outputs.version }}"
release-to-github-container-registry:
name: Release to GitHub Container Registry
runs-on: ubuntu-latest
needs: validate-inputs
timeout-minutes: 5
steps:
- uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Tag stable/latest GHCR image
if: needs.validate-inputs.outputs.release_channel == 'stable'
run: |
docker buildx imagetools create -t "ghcr.io/${{ github.repository_owner }}/n8n:stable" "ghcr.io/${{ github.repository_owner }}/n8n:${{ needs.validate-inputs.outputs.version }}"
docker buildx imagetools create -t "ghcr.io/${{ github.repository_owner }}/n8n:latest" "ghcr.io/${{ github.repository_owner }}/n8n:${{ needs.validate-inputs.outputs.version }}"
- name: Tag beta/next GHCR image
if: needs.validate-inputs.outputs.release_channel == 'beta'
run: |
docker buildx imagetools create -t "ghcr.io/${{ github.repository_owner }}/n8n:beta" "ghcr.io/${{ github.repository_owner }}/n8n:${{ needs.validate-inputs.outputs.version }}"
docker buildx imagetools create -t "ghcr.io/${{ github.repository_owner }}/n8n:next" "ghcr.io/${{ github.repository_owner }}/n8n:${{ needs.validate-inputs.outputs.version }}"
update-docs:
name: Update latest and next in the docs
runs-on: ubuntu-latest
needs: [validate-inputs, release-to-npm, release-to-docker-hub]
steps:
- continue-on-error: true
run: curl -u docsWorkflows:${{ secrets.N8N_WEBHOOK_DOCS_PASSWORD }} --request GET 'https://internal.users.n8n.cloud/webhook/update-latest-next'

View File

@@ -0,0 +1,237 @@
name: Security - Scan Docker Image With Trivy
on:
workflow_dispatch:
inputs:
image_ref:
description: 'Full image reference to scan e.g. ghcr.io/n8n-io/n8n:latest'
required: true
default: 'ghcr.io/n8n-io/n8n:latest'
workflow_call:
inputs:
image_ref:
type: string
description: 'Full image reference to scan e.g. ghcr.io/n8n-io/n8n:latest'
required: true
secrets:
QBOT_SLACK_TOKEN:
required: true
permissions:
contents: read
env:
QBOT_SLACK_TOKEN: ${{ secrets.QBOT_SLACK_TOKEN }}
SLACK_CHANNEL_ID: C042WDXPTEZ #mission-security
jobs:
security_scan:
name: Security - Scan Docker Image With Trivy
runs-on: ubuntu-latest
steps:
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
id: trivy_scan
with:
image-ref: ${{ inputs.image_ref }}
format: 'json'
output: 'trivy-results.json'
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
ignore-unfixed: false
exit-code: '0'
- name: Calculate vulnerability counts
id: process_results
run: |
if [ ! -s trivy-results.json ] || [ "$(jq '.Results | length' trivy-results.json)" -eq 0 ]; then
echo "No vulnerabilities found."
echo "vulnerabilities_found=false" >> "$GITHUB_OUTPUT"
exit 0
fi
# Calculate counts by severity
CRITICAL_COUNT=$(jq '([.Results[]?.Vulnerabilities[]? | select(.Severity == "CRITICAL")] | length)' trivy-results.json)
HIGH_COUNT=$(jq '([.Results[]?.Vulnerabilities[]? | select(.Severity == "HIGH")] | length)' trivy-results.json)
MEDIUM_COUNT=$(jq '([.Results[]?.Vulnerabilities[]? | select(.Severity == "MEDIUM")] | length)' trivy-results.json)
LOW_COUNT=$(jq '([.Results[]?.Vulnerabilities[]? | select(.Severity == "LOW")] | length)' trivy-results.json)
TOTAL_VULNS=$((CRITICAL_COUNT + HIGH_COUNT + MEDIUM_COUNT + LOW_COUNT))
# Get unique CVE count
UNIQUE_CVES=$(jq -r '[.Results[]?.Vulnerabilities[]?.VulnerabilityID] | unique | length' trivy-results.json)
# Get affected packages count
AFFECTED_PACKAGES=$(jq -r '[.Results[]?.Vulnerabilities[]? | .PkgName] | unique | length' trivy-results.json)
{
echo "vulnerabilities_found=$( [ "$TOTAL_VULNS" -gt 0 ] && echo 'true' || echo 'false' )"
echo "total_count=$TOTAL_VULNS"
echo "critical_count=$CRITICAL_COUNT"
echo "high_count=$HIGH_COUNT"
echo "medium_count=$MEDIUM_COUNT"
echo "low_count=$LOW_COUNT"
echo "unique_cves=$UNIQUE_CVES"
echo "affected_packages=$AFFECTED_PACKAGES"
} >> "$GITHUB_OUTPUT"
- name: Generate GitHub Job Summary
if: always()
run: |
{
echo "# 🛡️ Trivy Security Scan Results"
echo ""
echo "**Image:** \`${{ inputs.image_ref }}\`"
echo "**Scan Date:** $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "${{ steps.process_results.outputs.vulnerabilities_found }}" == "false" ]; then
{
echo "✅ **No vulnerabilities found!**"
} >> "$GITHUB_STEP_SUMMARY"
else
{
echo "## 📊 Summary"
echo "| Metric | Count |"
echo "|--------|-------|"
echo "| 🔴 Critical Vulnerabilities | ${{ steps.process_results.outputs.critical_count }} |"
echo "| 🟠 High Vulnerabilities | ${{ steps.process_results.outputs.high_count }} |"
echo "| 🟡 Medium Vulnerabilities | ${{ steps.process_results.outputs.medium_count }} |"
echo "| 🟡 Low Vulnerabilities | ${{ steps.process_results.outputs.low_count }} |"
echo "| 📋 Unique CVEs | ${{ steps.process_results.outputs.unique_cves }} |"
echo "| 📦 Affected Packages | ${{ steps.process_results.outputs.affected_packages }} |"
echo ""
echo "## 🚨 Top Vulnerabilities"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
{
# Generate detailed vulnerability table
jq -r --arg image_ref "${{ inputs.image_ref }}" '
# Collect all vulnerabilities
[.Results[] | select(.Vulnerabilities != null) | .Vulnerabilities[]] |
# Group by CVE ID to avoid duplicates
group_by(.VulnerabilityID) |
map({
cve: .[0].VulnerabilityID,
severity: .[0].Severity,
cvss: (.[0].CVSS.nvd.V3Score // "N/A"),
cvss_sort: (.[0].CVSS.nvd.V3Score // 0),
packages: [.[] | "\(.PkgName)@\(.InstalledVersion)"] | unique | join(", "),
fixed: (.[0].FixedVersion // "No fix available"),
description: (.[0].Description // "No description available") | split("\n")[0] | .[0:150]
}) |
# Sort by severity (CRITICAL, HIGH, MEDIUM, LOW) and CVSS score
sort_by(
if .severity == "CRITICAL" then 0
elif .severity == "HIGH" then 1
elif .severity == "MEDIUM" then 2
elif .severity == "LOW" then 3
else 4 end,
-.cvss_sort
) |
# Take top 15
.[:15] |
# Generate markdown table
"| CVE | Severity | CVSS | Package(s) | Fix Version | Description |",
"|-----|----------|------|------------|-------------|-------------|",
(.[] | "| [\(.cve)](https://nvd.nist.gov/vuln/detail/\(.cve)) | \(.severity) | \(.cvss) | `\(.packages)` | `\(.fixed)` | \(.description) |")
' trivy-results.json
echo ""
echo "---"
echo "🔍 **View detailed logs above for full analysis**"
} >> "$GITHUB_STEP_SUMMARY"
fi
- name: Generate Slack Blocks JSON
if: steps.process_results.outputs.vulnerabilities_found == 'true'
id: generate_blocks
run: |
BLOCKS_JSON=$(jq -c --arg image_ref "${{ inputs.image_ref }}" \
--arg repo_url "${{ github.server_url }}/${{ github.repository }}" \
--arg repo_name "${{ github.repository }}" \
--arg run_url "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--arg critical_count "${{ steps.process_results.outputs.critical_count }}" \
--arg high_count "${{ steps.process_results.outputs.high_count }}" \
--arg medium_count "${{ steps.process_results.outputs.medium_count }}" \
--arg low_count "${{ steps.process_results.outputs.low_count }}" \
--arg unique_cves "${{ steps.process_results.outputs.unique_cves }}" \
'
# Function to create a vulnerability block with emoji indicators
def vuln_block: {
"type": "section",
"text": {
"type": "mrkdwn",
"text": "\(if .Severity == "CRITICAL" then ":red_circle:" elif .Severity == "HIGH" then ":large_orange_circle:" elif .Severity == "MEDIUM" then ":large_yellow_circle:" else ":large_green_circle:" end) *<https://nvd.nist.gov/vuln/detail/\(.VulnerabilityID)|\(.VulnerabilityID)>* (CVSS: `\(.CVSS.nvd.V3Score // "N/A")`)\n*Package:* `\(.PkgName)@\(.InstalledVersion)` → `\(.FixedVersion // "No fix available")`"
}
};
# Main structure
[
{
"type": "header",
"text": { "type": "plain_text", "text": ":warning: Trivy Scan: Vulnerabilities Detected" }
},
{
"type": "section",
"fields": [
{ "type": "mrkdwn", "text": "*Repository:*\n<\($repo_url)|\($repo_name)>" },
{ "type": "mrkdwn", "text": "*Image:*\n`\($image_ref)`" },
{ "type": "mrkdwn", "text": "*Critical:*\n:red_circle: \($critical_count)" },
{ "type": "mrkdwn", "text": "*High:*\n:large_orange_circle: \($high_count)" },
{ "type": "mrkdwn", "text": "*Medium:*\n:large_yellow_circle: \($medium_count)" },
{ "type": "mrkdwn", "text": "*Low:*\n:large_green_circle: \($low_count)" }
]
},
{
"type": "context",
"elements": [
{ "type": "mrkdwn", "text": ":shield: \($unique_cves) unique CVEs affecting packages" }
]
},
{ "type": "divider" }
] +
(
# Group vulnerabilities by CVE to avoid duplicates in notification
[.Results[] | select(.Vulnerabilities != null) | .Vulnerabilities[]] |
group_by(.VulnerabilityID) |
map(.[0]) |
sort_by(
(if .Severity == "CRITICAL" then 0
elif .Severity == "HIGH" then 1
elif .Severity == "MEDIUM" then 2
elif .Severity == "LOW" then 3
else 4 end),
-((.CVSS.nvd.V3Score // 0) | tonumber? // 0)
) |
.[:8] |
map(. | vuln_block)
) +
[
{ "type": "divider" },
{
"type": "actions",
"elements": [
{
"type": "button",
"text": { "type": "plain_text", "text": ":github: View Full Report" },
"style": "primary",
"url": $run_url
}
]
}
]
' trivy-results.json)
echo "slack_blocks=$BLOCKS_JSON" >> "$GITHUB_OUTPUT"
- name: Send Slack Notification
if: steps.process_results.outputs.vulnerabilities_found == 'true'
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
with:
method: chat.postMessage
token: ${{ secrets.QBOT_SLACK_TOKEN }}
payload: |
channel: ${{ env.SLACK_CHANNEL_ID }}
text: "🚨 Trivy Scan: ${{ steps.process_results.outputs.critical_count }} Critical, ${{ steps.process_results.outputs.high_count }} High, ${{ steps.process_results.outputs.medium_count }} Medium, ${{ steps.process_results.outputs.low_count }} Low vulnerabilities found in ${{ inputs.image_ref }}"
blocks: ${{ steps.generate_blocks.outputs.slack_blocks }}

View File

@@ -0,0 +1,110 @@
name: Sync Public API Schema to Docs Repo
on:
# Triggers for the master branch if relevant Public API files have changed
push:
branches:
- master
paths:
# Trigger if:
# - any of the public API files change
- 'packages/cli/src/public-api/**/*.{css,yaml,yml}'
# - the build script or dependencies change
- 'packages/cli/package.json'
# - any main dependencies change
- 'pnpm-lock.yaml'
# Allow manual trigger
workflow_dispatch:
jobs:
sync-public-api:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout Main n8n Repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup PNPM
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: '22.x'
cache: 'pnpm'
- name: Install Dependencies
run: pnpm install --frozen-lockfile
shell: bash
- name: Build Public API Schema
run: pnpm run build:data
working-directory: ./packages/cli
- name: Verify OpenAPI schema exists
id: verify_file
run: |
if [[ -f "packages/cli/dist/public-api/v1/openapi.yml" ]]; then
echo "OpenAPI file found: packages/cli/dist/public-api/v1/openapi.yml"
echo "file_exists=true" >> "$GITHUB_OUTPUT"
else
echo "ERROR: OpenAPI file not found at packages/cli/dist/public-api/v1/openapi.yml after build."
echo "file_exists=false" >> "$GITHUB_OUTPUT"
fi
- name: Generate GitHub App Token
if: steps.verify_file.outputs.file_exists == 'true'
id: generate_token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.N8N_ASSISTANT_APP_ID }}
private-key: ${{ secrets.N8N_ASSISTANT_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
repositories: n8n-docs
- name: Checkout Docs Repository
if: steps.verify_file.outputs.file_exists == 'true'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: n8n-io/n8n-docs
token: ${{ steps.generate_token.outputs.token }}
path: public-docs
- name: Copy OpenAPI file to Docs Repo
if: steps.verify_file.outputs.file_exists == 'true'
run: |
# Destination path within the 'public-docs' checkout directory
DOCS_TARGET_PATH="public-docs/docs/api/v1/openapi.yml"
echo "Copying 'packages/cli/dist/public-api/v1/openapi.yml' to '${DOCS_TARGET_PATH}'"
cp packages/cli/dist/public-api/v1/openapi.yml "${DOCS_TARGET_PATH}"
- name: Create PR in Docs Repo
if: steps.verify_file.outputs.file_exists == 'true'
uses: peter-evans/create-pull-request@18e469570b1cf0dfc11d60ec121099f8ff3e617a # v7.0.8
with:
token: ${{ steps.generate_token.outputs.token }}
path: public-docs
commit-message: 'feat(public-api): Update Public API schema'
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
signoff: false
# Create a single branch for multiple PRs
branch: 'chore/sync-public-api-schema'
delete-branch: false
title: 'chore: Update Public API schema'
body: |
Automated update of the Public API OpenAPI YAML schema.
This PR was generated by a GitHub Action in the [${{ github.repository }} repository](https://github.com/${{ github.repository }}).
Source commit: [${{ github.sha }}](https://github.com/${{ github.repository }}/commit/${{ github.sha }})
Please review the changes and merge if appropriate.

View File

@@ -0,0 +1,53 @@
name: Test Workflows - Reusable
on:
workflow_call:
inputs:
git_ref:
description: 'The Git ref (branch, tag, or SHA) to checkout and test.'
required: true
type: string
compare_schemas:
description: 'Set to "true" to enable schema comparison during tests.'
required: false
default: 'true'
type: string
secrets:
ENCRYPTION_KEY:
description: 'Encryption key for n8n operations.'
required: true
CURRENTS_RECORD_KEY:
description: 'Currents record key for uploading test results.'
required: true
env:
NODE_OPTIONS: --max-old-space-size=3072
jobs:
run_workflow_tests:
name: Run Workflow Tests with Snapshots
runs-on: blacksmith-2vcpu-ubuntu-2204
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.git_ref }}
- name: Set up Environment
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
- name: Set up Workflow Tests
run: pnpm --filter=n8n-playwright test:workflows:setup
env:
N8N_ENCRYPTION_KEY: ${{ secrets.ENCRYPTION_KEY }}
- name: Run Workflow Tests
run: pnpm --filter=n8n-playwright test:workflows --workers 4
env:
CURRENTS_RECORD_KEY: ${{ secrets.CURRENTS_RECORD_KEY }}
- name: Run Workflow Schema Tests
if: ${{ inputs.compare_schemas == 'true' }}
run: pnpm --filter=n8n-playwright test:workflows:schema
env:
CURRENTS_RECORD_KEY: ${{ secrets.CURRENTS_RECORD_KEY }}

View File

@@ -0,0 +1,20 @@
name: Test Workflows Nightly and Manual
on:
schedule:
- cron: '0 2 * * *'
workflow_dispatch:
inputs:
git_ref_to_test:
description: 'The Git ref (branch, tag, or SHA) to run tests against.'
required: true
type: string
default: 'master'
jobs:
run_workflow_tests:
name: Run Workflow Tests
uses: ./.github/workflows/test-workflows-callable.yml
with:
git_ref: ${{ github.event_name == 'schedule' && 'master' || github.event.inputs.git_ref_to_test }}
secrets: inherit

View File

@@ -0,0 +1,25 @@
name: Test Workflows on PR Approval
on:
pull_request_review:
types: [submitted]
permissions:
contents: read
jobs:
eligibility_check:
name: Check Eligibility for Test Run
if: github.event.review.state == 'approved'
uses: ./.github/workflows/check-run-eligibility.yml
with:
is_pr_approved_by_maintainer: true
run_workflow_tests:
name: Run Tests on Approved Internal PR
needs: [eligibility_check]
if: needs.eligibility_check.outputs.should_run == 'true'
uses: ./.github/workflows/test-workflows-callable.yml
with:
git_ref: ${{ github.event.pull_request.head.sha }}
secrets: inherit

View File

@@ -0,0 +1,110 @@
name: Test Workflows on PR Comment
on:
issue_comment:
types: [created]
permissions:
pull-requests: read
contents: read
jobs:
handle_comment_command:
name: Handle /test-workflows Command
if: github.event.issue.pull_request && startsWith(github.event.comment.body, '/test-workflows')
runs-on: ubuntu-latest
outputs:
permission_granted: ${{ steps.pr_check_and_details.outputs.permission_granted }}
git_ref: ${{ steps.pr_check_and_details.outputs.head_sha }}
pr_number: ${{ steps.pr_check_and_details.outputs.pr_number_string }}
steps:
- name: Validate User, Get PR Details, and React
id: pr_check_and_details
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const commenter = context.actor;
const issueOwner = context.repo.owner;
const issueRepo = context.repo.repo;
const commentId = context.payload.comment.id;
const prNumber = context.issue.number; // In issue_comment on a PR, issue.number is the PR number
// Function to add a reaction to the comment
async function addReaction(content) {
try {
await github.rest.reactions.createForIssueComment({
owner: issueOwner,
repo: issueRepo,
comment_id: commentId,
content: content
});
} catch (reactionError) {
// Log if reaction fails but don't fail the script for this
console.log(`Failed to add reaction '${content}': ${reactionError.message}`);
}
}
// Initialize outputs to a non-triggering state
core.setOutput('permission_granted', 'false');
core.setOutput('head_sha', '');
core.setOutput('pr_number_string', '');
// 1. Check user permissions
try {
const { data: permissions } = await github.rest.repos.getCollaboratorPermissionLevel({
owner: issueOwner,
repo: issueRepo,
username: commenter
});
const allowedPermissions = ['admin', 'write', 'maintain'];
if (!allowedPermissions.includes(permissions.permission)) {
console.log(`User @${commenter} has '${permissions.permission}' permission. Needs 'admin', 'write', or 'maintain'.`);
await addReaction('-1'); // User does not have permission
return; // Exit script, tests will not be triggered
}
console.log(`User @${commenter} has '${permissions.permission}' permission.`);
} catch (error) {
console.log(`Could not verify permissions for @${commenter}: ${error.message}`);
await addReaction('confused'); // Error checking permissions
return; // Exit script
}
// 2. Fetch PR details (if permission check passed)
let headSha;
try {
const { data: pr } = await github.rest.pulls.get({
owner: issueOwner,
repo: issueRepo,
pull_number: prNumber,
});
headSha = pr.head.sha;
console.log(`Fetched PR details: SHA - ${headSha}, PR Number - ${prNumber}`);
// Set outputs for the next job
core.setOutput('permission_granted', 'true');
core.setOutput('head_sha', headSha);
core.setOutput('pr_number_string', prNumber.toString());
await addReaction('+1'); // Command accepted, tests will be triggered
} catch (error) {
console.log(`Failed to fetch PR details for PR #${prNumber}: ${error.message}`);
core.setOutput('permission_granted', 'false'); // Ensure this is false if PR fetch fails
await addReaction('confused'); // Error fetching PR details
}
trigger_reusable_tests:
name: Trigger Reusable Test Workflow
needs: handle_comment_command
if: >
always() &&
needs.handle_comment_command.result != 'skipped' &&
needs.handle_comment_command.outputs.permission_granted == 'true' &&
needs.handle_comment_command.outputs.git_ref != ''
uses: ./.github/workflows/test-workflows-callable.yml
with:
git_ref: ${{ needs.handle_comment_command.outputs.git_ref }}
secrets: inherit

View File

@@ -0,0 +1,56 @@
name: Reusable units test workflow
on:
workflow_call:
inputs:
ref:
description: GitHub ref to test.
required: false
type: string
default: master
nodeVersion:
description: Version of node to use.
required: false
type: string
default: 22.x
collectCoverage:
required: false
default: false
type: boolean
secrets:
CODECOV_TOKEN:
description: 'Codecov upload token.'
required: false
env:
NODE_OPTIONS: --max-old-space-size=7168
jobs:
unit-test:
name: Unit tests
runs-on: blacksmith-4vcpu-ubuntu-2204
env:
COVERAGE_ENABLED: ${{ inputs.collectCoverage }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.ref }}
- name: Build
uses: n8n-io/n8n/.github/actions/setup-nodejs-blacksmith@f5fbbbe0a28a886451c886cac6b49192a39b0eea # v1.104.1
with:
node-version: ${{ inputs.nodeVersion }}
- name: Test
run: pnpm test:ci
- name: Upload test results to Codecov
if: ${{ !cancelled() }}
uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload coverage to Codecov
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
with:
token: ${{ secrets.CODECOV_TOKEN }}

42
n8n-n8n-1.109.2/.gitignore vendored Executable file
View File

@@ -0,0 +1,42 @@
node_modules
.DS_Store
.tmp
tmp
dist
coverage
npm-debug.log*
yarn.lock
google-generated-credentials.json
_START_PACKAGE
.env
.vscode/*
!.vscode/extensions.json
!.vscode/settings.default.json
.idea
nodelinter.config.json
**/package-lock.json
packages/**/.turbo
.turbo
*.tsbuildinfo
.stylelintcache
*.swp
CHANGELOG-*.md
*.mdx
build-storybook.log
*.junit.xml
junit.xml
test-results.json
*.0x
packages/testing/playwright/playwright-report
packages/testing/playwright/test-results
packages/testing/playwright/ms-playwright-cache
test-results/
compiled_app_output
trivy_report*
compiled
packages/cli/src/modules/my-feature
.secrets
packages/testing/**/.cursor/rules/
.venv
.ruff_cache
__pycache__

28
n8n-n8n-1.109.2/.npmignore Executable file
View File

@@ -0,0 +1,28 @@
dist/test
dist/**/*.{js.map}
.DS_Store
# local env files
.env.local
.env.*.local
# Log files
yarn-debug.log*
yarn-error.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw*
.editorconfig
eslint.config.js
tsconfig.json
.turbo
*.tsbuildinfo

14
n8n-n8n-1.109.2/.npmrc Executable file
View File

@@ -0,0 +1,14 @@
audit = false
fund = false
update-notifier = false
auto-install-peers = true
strict-peer-dependencies = false
prefer-workspace-packages = true
link-workspace-packages = deep
hoist = true
shamefully-hoist = true
hoist-workspace-packages = false
loglevel = warn
package-manager-strict=false
# https://github.com/pnpm/pnpm/issues/7024
package-import-method=clone-or-copy

25
n8n-n8n-1.109.2/.prettierignore Executable file
View File

@@ -0,0 +1,25 @@
coverage
dist
package.json
pnpm-lock.yaml
packages/frontend/editor-ui/index.html
packages/nodes-base/nodes/**/test
packages/cli/templates/form-trigger.handlebars
packages/cli/templates/form-trigger-completion.handlebars
packages/cli/templates/form-trigger-409.handlebars
packages/cli/templates/form-trigger-404.handlebars
cypress/fixtures
CHANGELOG.md
.github/pull_request_template.md
# Ignored for now
**/*.md
# Handled by biome
**/*.ts
**/*.js
**/*.json
**/*.jsonc
# Auto-generated
**/components.d.ts
justfile

51
n8n-n8n-1.109.2/.prettierrc.js Executable file
View File

@@ -0,0 +1,51 @@
module.exports = {
/**
* https://prettier.io/docs/en/options.html#semicolons
*/
semi: true,
/**
* https://prettier.io/docs/en/options.html#trailing-commas
*/
trailingComma: 'all',
/**
* https://prettier.io/docs/en/options.html#bracket-spacing
*/
bracketSpacing: true,
/**
* https://prettier.io/docs/en/options.html#tabs
*/
useTabs: true,
/**
* https://prettier.io/docs/en/options.html#tab-width
*/
tabWidth: 2,
/**
* https://prettier.io/docs/en/options.html#arrow-function-parentheses
*/
arrowParens: 'always',
/**
* https://prettier.io/docs/en/options.html#quotes
*/
singleQuote: true,
/**
* https://prettier.io/docs/en/options.html#quote-props
*/
quoteProps: 'as-needed',
/**
* https://prettier.io/docs/en/options.html#end-of-line
*/
endOfLine: 'lf',
/**
* https://prettier.io/docs/en/options.html#print-width
*/
printWidth: 100,
};

14
n8n-n8n-1.109.2/.vscode/extensions.json vendored Executable file
View File

@@ -0,0 +1,14 @@
{
"recommendations": [
"biomejs.biome",
"streetsidesoftware.code-spell-checker",
"dangmai.workspace-default-settings",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
"mjmlio.vscode-mjml",
"ryanluker.vscode-coverage-gutters",
"Vue.volar",
"vitest.explorer"
]
}

37
n8n-n8n-1.109.2/.vscode/settings.default.json vendored Executable file
View File

@@ -0,0 +1,37 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "biomejs.biome"
},
"[typescript]": {
"editor.defaultFormatter": "biomejs.biome"
},
"[json]": {
"editor.defaultFormatter": "biomejs.biome"
},
"[jsonc]": {
"editor.defaultFormatter": "biomejs.biome"
},
"editor.codeActionsOnSave": {
"quickfix.biome": "explicit",
"source.organizeImports.biome": "never"
},
"search.exclude": {
"node_modules": true,
"dist": true,
"pnpm-lock.yaml": true,
"**/*.snapshot.json": true,
"test-workflows": true
},
"typescript.format.enable": false,
"typescript.tsdk": "node_modules/typescript/lib",
"workspace-default-settings.runOnActivation": true,
"prettier.prettierPath": "node_modules/prettier/index.cjs",
"eslint.probe": ["javascript", "typescript", "vue"],
"eslint.workingDirectories": [
{
"mode": "auto"
}
]
}

7436
n8n-n8n-1.109.2/CHANGELOG.md Executable file

File diff suppressed because it is too large

159
n8n-n8n-1.109.2/CLAUDE.md Executable file
View File

@@ -0,0 +1,159 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with
code in the n8n repository.
## Project Overview
n8n is a workflow automation platform written in TypeScript, using a monorepo
structure managed by pnpm workspaces. It consists of a Node.js backend, Vue.js
frontend, and extensible node-based workflow engine.
## General Guidelines
- Always use pnpm
- We use Linear as a ticket tracking system
- We use Posthog for feature flags
- When starting to work on a new ticket create a new branch from fresh
master with the name specified in Linear ticket
- When creating a new branch for a ticket in Linear - use the branch name
suggested by linear
- Use mermaid diagrams in MD files when you need to visualise something
## Essential Commands
### Building
Use `pnpm build` to build all packages. ALWAYS redirect the output of the
build command to a file:
```bash
pnpm build > build.log 2>&1
```
You can inspect the last few lines of the build log file to check for errors:
```bash
tail -n 20 build.log
```
### Testing
- `pnpm test` - Run all tests
- `pnpm test:affected` - Runs tests based on what has changed since the last
commit
- `pnpm dev:e2e` - E2E tests in development mode
Running a particular test file requires going to the directory of that test
and running: `pnpm test <test-file>`.
When changing directories, use `pushd` to navigate into the directory and
`popd` to return to the previous directory. When in doubt, use `pwd` to check
your current directory.
### Code Quality
- `pnpm lint` - Lint code
- `pnpm typecheck` - Run type checks
Always run lint and typecheck before committing code to ensure quality.
Execute these commands from within the specific package directory you're
working on (e.g., `cd packages/cli && pnpm lint`). Run the full repository
check only when preparing the final PR. When your changes affect type
definitions, interfaces in `@n8n/api-types`, or cross-package dependencies,
build the system before running lint and typecheck.
## Architecture Overview
**Monorepo Structure:** pnpm workspaces with Turbo build orchestration
### Package Structure
The monorepo is organized into these key packages:
- **`packages/@n8n/api-types`**: Shared TypeScript interfaces between frontend and backend
- **`packages/workflow`**: Core workflow interfaces and types
- **`packages/core`**: Workflow execution engine
- **`packages/cli`**: Express server, REST API, and CLI commands
- **`packages/editor-ui`**: Vue 3 frontend application
- **`packages/@n8n/i18n`**: Internationalization for UI text
- **`packages/nodes-base`**: Built-in nodes for integrations
- **`packages/@n8n/nodes-langchain`**: AI/LangChain nodes
- **`@n8n/design-system`**: Vue component library for UI consistency
- **`@n8n/config`**: Centralized configuration management
## Technology Stack
- **Frontend:** Vue 3 + TypeScript + Vite + Pinia + Storybook UI Library
- **Backend:** Node.js + TypeScript + Express + TypeORM
- **Testing:** Jest (unit) + Playwright (E2E)
- **Database:** TypeORM with SQLite/PostgreSQL/MySQL support
- **Code Quality:** Biome (for formatting) + ESLint + lefthook git hooks
### Key Architectural Patterns
1. **Dependency Injection**: Uses `@n8n/di` for IoC container (see the sketch after this list)
2. **Controller-Service-Repository**: Backend follows MVC-like pattern
3. **Event-Driven**: Internal event bus for decoupled communication
4. **Context-Based Execution**: Different contexts for different node types
5. **State Management**: Frontend uses Pinia stores
6. **Design System**: Reusable components and design tokens are centralized in
`@n8n/design-system`, where all pure Vue components should be placed to
ensure consistency and reusability
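A minimal TypeScript sketch of the dependency-injection pattern from item 1, assuming `@n8n/di` exposes a typedi-style `Service` decorator and `Container` with decorator metadata enabled; the classes are illustrative, not real n8n services:
```typescript
import { Container, Service } from '@n8n/di';

@Service()
class GreetingService {
	greet(name: string): string {
		return `Hello, ${name}!`;
	}
}

@Service()
class GreetingController {
	// Constructor parameters are resolved by the container.
	constructor(private readonly greetingService: GreetingService) {}

	handle(name: string): string {
		return this.greetingService.greet(name);
	}
}

// Resolving the controller also wires up its dependencies.
const controller = Container.get(GreetingController);
console.log(controller.handle('n8n'));
```
Real services follow the same shape, within the Controller-Service-Repository layering described in item 2.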
## Key Development Patterns
- Each package has isolated build configuration and can be developed independently
- Hot reload works across the full stack during development
- Node development uses dedicated `node-dev` CLI tool
- Workflow tests are JSON-based for integration testing
- AI features have dedicated development workflow (`pnpm dev:ai`)
### TypeScript Best Practices
- **NEVER use `any` type** - use proper types or `unknown`
- **Avoid type casting with `as`** - use type guards or type predicates instead (see the example after this list)
- **Define shared interfaces in `@n8n/api-types`** package for FE/BE communication
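A minimal sketch of the type-guard approach; the payload shape is illustrative, and the property checks rely on TypeScript's `in` narrowing (4.9+):
```typescript
interface WebhookPayload {
	workflowId: string;
	active: boolean;
}

// Type predicate: narrows `unknown` data without an `as` cast at the call site.
function isWebhookPayload(value: unknown): value is WebhookPayload {
	if (typeof value !== 'object' || value === null) return false;
	if (!('workflowId' in value) || !('active' in value)) return false;
	return typeof value.workflowId === 'string' && typeof value.active === 'boolean';
}

function handleIncoming(data: unknown): string {
	if (!isWebhookPayload(data)) {
		throw new Error('Unexpected payload shape');
	}
	// `data` is typed as WebhookPayload here.
	return data.workflowId;
}
```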
### Error Handling
- Don't use the `ApplicationError` class in CLI and nodes for throwing errors; it's deprecated. Use `UnexpectedError`, `OperationalError`, or `UserError` instead (see the sketch after this list).
- Import from appropriate error classes in each package
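A minimal sketch of choosing between the three error classes, assuming they can be imported from `n8n-workflow` (adjust the import path to the package you are working in); the helper is illustrative only:
```typescript
import { OperationalError, UnexpectedError, UserError } from 'n8n-workflow';

function readSetting(settings: Map<string, string>, key: string): string {
	if (key.trim() === '') {
		// Invalid input supplied by the user.
		throw new UserError('A setting key is required');
	}
	if (!settings.has(key)) {
		// An expected, recoverable runtime failure.
		throw new OperationalError(`Setting "${key}" was not found`);
	}
	const value = settings.get(key);
	if (value === undefined) {
		// An invariant violation that should never happen.
		throw new UnexpectedError(`Setting "${key}" disappeared while being read`);
	}
	return value;
}
```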
### Frontend Development
- **All UI text must use i18n** - add translations to `@n8n/i18n` package
- **Use CSS variables directly** - never hardcode spacing as px values
- **data-test-id must be a single value** (no spaces or multiple values)
When implementing CSS, refer to @packages/frontend/CLAUDE.md for guidelines on
CSS variables and styling conventions.
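As an illustration of the i18n rule, a sketch of resolving UI copy through the i18n layer instead of hardcoding it; the `useI18n` composable name, the `baseText` method, and the translation key are assumptions for this example, so verify them against the `@n8n/i18n` package:
```typescript
// Sketch only: all user-facing strings come from the i18n layer.
import { useI18n } from '@n8n/i18n'; // assumed export

export function useSaveButtonLabel(): string {
	const i18n = useI18n();
	// 'generic.save' is a hypothetical key; real keys are added to @n8n/i18n.
	return i18n.baseText('generic.save');
}
```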
### Testing Guidelines
- **Always work from within the package directory** when running tests
- **Mock all external dependencies** in unit tests
- **Confirm test cases with the user** before writing unit tests
- **Typecheck is critical before committing** - always run `pnpm typecheck`
- **When modifying Pinia stores**, check for unused computed properties
What we use for testing and writing tests:
- For testing nodes and other backend components, we use Jest for unit tests. Examples can be found in `packages/nodes-base/nodes/**/*test*`.
- We use `nock` for server mocking
- For frontend we use `vitest`
- For e2e tests we use `Playwright` and `pnpm dev:e2e`. The old Cypress tests
are being migrated to Playwright, so please use Playwright for new tests.
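Putting these together, a hedged sketch of a backend unit test that mocks the external HTTP call with `nock`; the `fetchUserName` helper and the API host are invented for illustration (real node tests import the node implementation instead):
```typescript
import axios from 'axios';
import nock from 'nock';

// Hypothetical helper under test.
async function fetchUserName(baseUrl: string, id: string): Promise<string> {
	const response = await axios.get<{ name: string }>(`${baseUrl}/users/${id}`);
	return response.data.name;
}

describe('fetchUserName', () => {
	afterEach(() => nock.cleanAll());

	it('returns the name from the mocked API', async () => {
		// nock intercepts the outgoing HTTP request, so no real network call is made.
		nock('https://api.example.test').get('/users/42').reply(200, { name: 'Ada' });

		await expect(fetchUserName('https://api.example.test', '42')).resolves.toBe('Ada');
	});
});
```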
### Common Development Tasks
When implementing features:
1. Define API types in `packages/@n8n/api-types`
2. Implement backend logic in the `packages/cli` module, following
`@packages/cli/scripts/backend-module/backend-module.guide.md`
3. Add API endpoints via controllers
4. Update frontend in `packages/editor-ui` with i18n support
5. Write tests with proper mocks
6. Run `pnpm typecheck` to verify types
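For step 1, a minimal sketch of a shared type that both the frontend and backend can import; the `WidgetSummary` name and its fields are invented for illustration:
```typescript
// Illustrative DTO living in packages/@n8n/api-types and shared by FE and BE.
export interface WidgetSummary {
	id: string;
	name: string;
	/** ISO 8601 timestamp of the last update. */
	updatedAt: string;
}

export interface ListWidgetsResponse {
	items: WidgetSummary[];
	totalCount: number;
}
```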
## GitHub Guidelines
- When creating a PR, use the conventions in
`.github/pull_request_template.md` and
`.github/pull_request_title_conventions.md`.
- Use `gh pr create --draft` to create draft PRs.
- Always reference the Linear ticket in the PR description, using
`https://linear.app/n8n/issue/[TICKET-ID]`.
- Always link to the GitHub issue if one is mentioned in the Linear ticket.

View File

@@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at jan@n8n.io. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

423
n8n-n8n-1.109.2/CONTRIBUTING.md Executable file
View File

@@ -0,0 +1,423 @@
# Contributing to n8n
Great that you are here and that you want to contribute to n8n!
## Contents
- [Contributing to n8n](#contributing-to-n8n)
- [Contents](#contents)
- [Code of conduct](#code-of-conduct)
- [Directory structure](#directory-structure)
- [Development setup](#development-setup)
- [Dev Container](#dev-container)
- [Requirements](#requirements)
- [Node.js](#nodejs)
- [pnpm](#pnpm)
- [pnpm workspaces](#pnpm-workspaces)
- [corepack](#corepack)
- [Build tools](#build-tools)
- [Actual n8n setup](#actual-n8n-setup)
- [Start](#start)
- [Development cycle](#development-cycle)
- [Community PR Guidelines](#community-pr-guidelines)
- [**1. Change Request/Comment**](#1-change-requestcomment)
- [**2. General Requirements**](#2-general-requirements)
- [**3. PR Specific Requirements**](#3-pr-specific-requirements)
- [**4. Workflow Summary for Non-Compliant PRs**](#4-workflow-summary-for-non-compliant-prs)
- [Test suite](#test-suite)
- [Unit tests](#unit-tests)
- [Code Coverage](#code-coverage)
- [E2E tests](#e2e-tests)
- [Releasing](#releasing)
- [Create custom nodes](#create-custom-nodes)
- [Extend documentation](#extend-documentation)
- [Contribute workflow templates](#contribute-workflow-templates)
- [Contributor License Agreement](#contributor-license-agreement)
## Code of conduct
This project and everyone participating in it are governed by the Code of
Conduct which can be found in the file [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report
unacceptable behavior to jan@n8n.io.
## Directory structure
n8n is split up into different modules which are all in a single mono repository.
The most important directories:
- [/docker/images](/docker/images) - Dockerfiles to create n8n containers
- [/packages](/packages) - The different n8n modules
- [/packages/cli](/packages/cli) - CLI code to run front- & backend
- [/packages/core](/packages/core) - Core code which handles workflow
execution, active webhooks and
workflows. **Contact n8n before
starting on any changes here**
- [/packages/frontend/@n8n/design-system](/packages/frontend/@n8n/design-system) - Vue frontend components
- [/packages/frontend/editor-ui](/packages/frontend/editor-ui) - Vue frontend workflow editor
- [/packages/node-dev](/packages/node-dev) - CLI to create new n8n-nodes
- [/packages/nodes-base](/packages/nodes-base) - Base n8n nodes
- [/packages/workflow](/packages/workflow) - Workflow code with interfaces which
get used by front- & backend
## Development setup
If you want to change or extend n8n you have to make sure that all the needed
dependencies are installed and the packages get linked correctly. Here's a short guide on how that can be done:
### Dev Container
If you already have VS Code and Docker installed, you can click [here](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/n8n-io/n8n) to get started. Clicking these links will cause VS Code to automatically install the Dev Containers extension if needed, clone the source code into a container volume, and spin up a dev container for use.
### Requirements
#### Node.js
[Node.js](https://nodejs.org/en/) version 22.16 or newer is required for development purposes.
#### pnpm
[pnpm](https://pnpm.io/) version 10.2 or newer is required for development purposes. We recommend installing it with [corepack](#corepack).
##### pnpm workspaces
n8n is split up into different modules which are all in a single mono repository.
To facilitate the module management, [pnpm workspaces](https://pnpm.io/workspaces) are used.
This automatically sets up file-links between modules which depend on each other.
#### corepack
We recommend enabling [Node.js corepack](https://nodejs.org/docs/latest-v16.x/api/corepack.html) with `corepack enable`.
You can install the correct version of pnpm using `corepack prepare --activate`.
**IMPORTANT**: If you have installed Node.js via homebrew, you'll need to run `brew install corepack`, since homebrew explicitly removes `npm` and `corepack` from [the `node` formula](https://github.com/Homebrew/homebrew-core/blob/master/Formula/node.rb#L66).
**IMPORTANT**: If you are on Windows, you'll need to run `corepack enable` and `corepack prepare --activate` in a terminal as an administrator.
#### Build tools
The packages which n8n uses depend on a few build tools:
Debian/Ubuntu:
```
apt-get install -y build-essential python
```
CentOS:
```
yum install gcc gcc-c++ make
```
Windows:
```
npm add -g windows-build-tools
```
MacOS:
No additional packages required.
#### actionlint (for GitHub Actions workflow development)
If you plan to modify GitHub Actions workflow files (`.github/workflows/*.yml`), you'll need [actionlint](https://github.com/rhysd/actionlint) for workflow validation:
**macOS (Homebrew):**
```
brew install actionlint
```
> **Note:** actionlint is only required if you're modifying workflow files. It runs automatically via git hooks when workflow files are changed.
### Actual n8n setup
> **IMPORTANT**: All the steps below have to get executed at least once to get the development setup up and running!
Now that everything n8n requires to run is installed, the actual n8n code can be
checked out and set up:
1. [Fork](https://guides.github.com/activities/forking/#fork) the n8n repository.
2. Clone your forked repository:
```
git clone https://github.com/<your_github_username>/n8n.git
```
3. Go into repository folder:
```
cd n8n
```
4. Add the original n8n repository as `upstream` to your forked repository:
```
git remote add upstream https://github.com/n8n-io/n8n.git
```
5. Install all dependencies of all modules and link them together:
```
pnpm install
```
6. Build all the code:
```
pnpm build
```
### Start
To start n8n execute:
```
pnpm start
```
To start n8n with tunnel:
```
./packages/cli/bin/n8n start --tunnel
```
## Development cycle
While iterating on n8n modules code, you can run `pnpm dev`. It will then
automatically build your code, restart the backend and refresh the frontend
(editor-ui) on every change you make.
### Basic Development Workflow
1. Start n8n in development mode:
```
pnpm dev
```
2. Hack, hack, hack
3. Check if everything still runs in production mode:
```
pnpm build
pnpm start
```
4. Create tests
5. Run all [tests](#test-suite):
```
pnpm test
```
6. Commit code and [create a pull request](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
### Hot Reload for Nodes (N8N_DEV_RELOAD)
When developing custom nodes or credentials, you can enable hot reload to automatically detect changes without restarting the server:
```bash
N8N_DEV_RELOAD=true pnpm dev
```
**Performance considerations:**
- File watching adds overhead to your system, especially on slower machines
- The watcher monitors potentially thousands of files, which can impact CPU and memory usage
- On resource-constrained systems, consider developing without hot reload and manually restarting when needed
### Selective Package Development
Running all packages in development mode can be resource-intensive. For better performance, run only the packages relevant to your work:
#### Available Filtered Commands
- **Backend-only development:**
```bash
pnpm dev:be
```
Excludes frontend packages like editor-ui and design-system
- **Frontend-only development:**
```bash
pnpm dev:fe
```
Runs the backend server and editor-ui development server
- **AI/LangChain nodes development:**
```bash
pnpm dev:ai
```
Runs only essential packages for AI node development
#### Custom Selective Development
For even more focused development, you can run packages individually:
**Example 1: Working on custom nodes**
```bash
# Terminal 1: Build and watch nodes package
cd packages/nodes-base
pnpm dev
# Terminal 2: Run the CLI with hot reload
cd packages/cli
N8N_DEV_RELOAD=true pnpm dev
```
**Example 2: Pure frontend development**
```bash
# Terminal 1: Start the backend server (no watching)
pnpm start
# Terminal 2: Run frontend dev server
cd packages/editor-ui
pnpm dev
```
**Example 3: Working on a specific node package**
```bash
# Terminal 1: Watch your node package
cd packages/nodes-base # or your custom node package
pnpm watch
# Terminal 2: Run CLI with hot reload
cd packages/cli
N8N_DEV_RELOAD=true pnpm dev
```
### Performance Considerations
The full development mode (`pnpm dev`) runs multiple processes in parallel:
1. **TypeScript compilation** for each package
2. **File watchers** monitoring source files
3. **Nodemon** restarting the backend on changes
4. **Vite dev server** for the frontend with HMR
5. **Multiple build processes** for various packages
**Performance impact:**
- Can consume significant CPU and memory resources
- File system watching creates overhead, especially on:
- Networked file systems
- Virtual machines with shared folders
- Systems with slower I/O performance
- The more packages you run in dev mode, the more system resources are consumed
**Recommendations for resource-constrained environments:**
1. Use selective development commands based on your task
2. Close unnecessary applications to free up resources
3. Monitor system performance and adjust your development approach accordingly
---
### Community PR Guidelines
#### **1. Change Request/Comment**
Please address the requested changes or provide feedback within 14 days. If there is no response or updates to the pull request during this time, it will be automatically closed. The PR can be reopened once the requested changes are applied.
#### **2. General Requirements**
- **Follow the Style Guide:**
- Ensure your code adheres to n8n's coding standards and conventions (e.g., formatting, naming, indentation). Use linting tools where applicable.
- **TypeScript Compliance:**
- Do not use `ts-ignore`.
- Ensure code adheres to TypeScript rules.
- **Avoid Repetitive Code:**
- Reuse existing components, parameters, and logic wherever possible instead of redefining or duplicating them.
- For nodes: Use the same parameter across multiple operations rather than defining a new parameter for each operation (if applicable).
- **Testing Requirements:**
- PRs **must include tests**:
- Unit tests
- Workflow tests for nodes (example [here](https://github.com/n8n-io/n8n/tree/master/packages/nodes-base/nodes/Switch/V3/test))
- UI tests (if applicable)
- **Typos:**
- Use a spell-checking tool, such as [**Code Spell Checker**](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker), to avoid typos.
#### **3. PR Specific Requirements**
- **Small PRs Only:**
- Focus on a single feature or fix per PR.
- **Naming Convention:**
- Follow [n8n's PR Title Conventions](https://github.com/n8n-io/n8n/blob/master/.github/pull_request_title_conventions.md#L36).
- **New Nodes:**
- PRs that introduce new nodes will be **auto-closed** unless they are explicitly requested by the n8n team and aligned with an agreed project scope. However, you can still explore [building your own nodes](https://docs.n8n.io/integrations/creating-nodes/), as n8n offers the flexibility to create your own custom nodes.
- **Typo-Only PRs:**
- Typos are not sufficient justification for a PR and will be rejected.
#### **4. Workflow Summary for Non-Compliant PRs**
- **No Tests:** If tests are not provided, the PR will be auto-closed after **14 days**.
- **Non-Small PRs:** Large or multifaceted PRs will be returned for segmentation.
- **New Nodes/Typo PRs:** Automatically rejected if not aligned with project scope or guidelines.
---
### Test suite
#### Unit tests
Unit tests can be started via:
```
pnpm test
```
If that gets executed in one of the package folders, it will only run the tests
of that package. If it gets executed in the repository root, it will run the
tests of all packages.
If you made a change that requires an update to a `.test.ts.snap` file, pass `-u` to the test command or press `u` in watch mode.
#### Code Coverage
We track coverage for all our code on [Codecov](https://app.codecov.io/gh/n8n-io/n8n).
But when you are working on tests locally, we recommend running your tests with the env variable `COVERAGE_ENABLED` set to `true` (for example, `COVERAGE_ENABLED=true pnpm test`). You can then view the code coverage in the `coverage` folder, or you can use [this VSCode extension](https://marketplace.visualstudio.com/items?itemName=ryanluker.vscode-coverage-gutters) to visualize the coverage directly in VSCode.
#### E2E tests
⚠️ You have to run `pnpm cypress:install` to install Cypress before running the tests for the first time, and again whenever Cypress is updated.
E2E tests can be started via one of the following commands:
- `pnpm test:e2e:ui`: Start n8n and run e2e tests interactively using built UI code. Does not react to code changes (i.e. runs `pnpm start` and `cypress open`)
- `pnpm test:e2e:dev`: Start n8n in development mode and run e2e tests interactively. Reacts to code changes (i.e. runs `pnpm dev` and `cypress open`)
- `pnpm test:e2e:all`: Start n8n and run e2e tests headless (i.e. runs `pnpm start` and `cypress run --headless`)
⚠️ Remember to stop your dev server first; otherwise port binding will fail.
## Releasing
To start a release, trigger [this workflow](https://github.com/n8n-io/n8n/actions/workflows/release-create-pr.yml) with the SemVer release type, and select a branch to cut this release from. This workflow will then:
1. Bump versions of packages that have changed or have dependencies that have changed
2. Update the Changelog
3. Create a new branch called `release/${VERSION}`, and
4. Create a new pull-request to track any further changes that need to be included in this release
Once ready to release, simply merge the pull-request.
This triggers [another workflow](https://github.com/n8n-io/n8n/actions/workflows/release-publish.yml), that will:
1. Build and publish the packages that have a new version in this release
2. Create a new tag, and GitHub release from squashed release commit
3. Merge the squashed release commit back into `master`
## Create custom nodes
Learn about [building nodes](https://docs.n8n.io/integrations/creating-nodes/) to create custom nodes for n8n. You can create community nodes and make them available using [npm](https://www.npmjs.com/).
## Extend documentation
The repository for the n8n documentation on [docs.n8n.io](https://docs.n8n.io) can be found [here](https://github.com/n8n-io/n8n-docs).
## Contribute workflow templates
You can submit your workflows to n8n's template library.
n8n is working on a creator program, and developing a marketplace of templates. This is an ongoing project, and details are likely to change.
Refer to [n8n Creator hub](https://www.notion.so/n8n/n8n-Creator-hub-7bd2cbe0fce0449198ecb23ff4a2f76f) for information on how to submit templates and become a creator.
## Contributor License Agreement
So that we do not have any potential problems later, it is sadly necessary to sign a [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md). That can be done literally with the push of a button.
We use the simplest one that exists. It is from [Indie Open Source](https://indieopensource.com/forms/cla), which uses plain English and is literally only a few lines long.
Once a pull request is opened, an automated bot will promptly leave a comment requesting the agreement to be signed. The pull request can only be merged once the signature is obtained.

View File

@@ -0,0 +1,5 @@
# n8n Contributor License Agreement
I give n8n permission to license my contributions on any terms they like. I am giving them this license in order to make it possible for them to accept my contributions into their project.
**_As far as the law allows, my contributions come as is, without any warranty or condition, and I will not be liable to anyone for any damages related to this software or this license, under any kind of legal claim._**

88
n8n-n8n-1.109.2/LICENSE.md Executable file
View File

@@ -0,0 +1,88 @@
# License
Portions of this software are licensed as follows:
- Content of branches other than the main branch (i.e. "master") are not licensed.
- Source code files that contain ".ee." in their filename or ".ee" in their dirname are NOT licensed under
the Sustainable Use License.
To use source code files that contain ".ee." in their filename or ".ee" in their dirname you must hold a
valid n8n Enterprise License specifically allowing you access to such source code files and as defined
in "LICENSE_EE.md".
- All third party components incorporated into the n8n Software are licensed under the original license
provided by the owner of the applicable component.
- Content outside of the above mentioned files or restrictions is available under the "Sustainable Use
License" as defined below.
## Sustainable Use License
Version 1.0
### Acceptance
By using the software, you agree to all of the terms and conditions below.
### Copyright License
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license
to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject
to the limitations below.
### Limitations
You may use or modify the software only for your own internal business purposes or for non-commercial or
personal use. You may distribute the software or provide it to others only if you do so free of charge for
non-commercial purposes. You may not alter, remove, or obscure any licensing, copyright, or other notices of
the licensor in the software. Any use of the licensor's trademarks is subject to applicable law.
### Patents
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to
license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case
subject to the limitations and conditions in this license. This license does not cover any patent claims that
you cause to be infringed by modifications or additions to the software. If you or your company make any
written claim that the software infringes or contributes to infringement of any patent, your patent license
for the software granted under these terms ends immediately. If your company makes such a claim, your patent
license ends immediately for work on behalf of your company.
### Notices
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these
terms. If you modify the software, you must include in any modified copies of the software a prominent notice
stating that you have modified the software.
### No Other Rights
These terms do not imply any licenses other than those expressly granted in these terms.
### Termination
If you use the software in violation of these terms, such use is not licensed, and your license will
automatically terminate. If the licensor provides you with a notice of your violation, and you cease all
violation of this license no later than 30 days after you receive that notice, your license will be reinstated
retroactively. However, if you violate these terms after such reinstatement, any additional violation of these
terms will cause your license to terminate automatically and permanently.
### No Liability
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will
not be liable to you for any damages arising out of these terms or the use or nature of the software, under
any kind of legal claim.
### Definitions
The “licensor” is the entity offering these terms.
The “software” is the software the licensor makes available under these terms, including any portion of it.
“You” refers to the individual or entity agreeing to these terms.
“Your company” is any legal entity, sole proprietorship, or other kind of organization that you work for, plus
all organizations that have control over, are under the control of, or are under common control with that
organization. Control means ownership of substantially all the assets of an entity, or the power to direct its
management and policies by vote, contract, or otherwise. Control can be direct or indirect.
“Your license” is the license granted to you for the software under these terms.
“Use” means anything you do with the software requiring your license.
“Trademark” means trademarks, service marks, and similar rights.

27
n8n-n8n-1.109.2/LICENSE_EE.md Executable file
View File

@@ -0,0 +1,27 @@
# The n8n Enterprise License (the “Enterprise License”)
Copyright (c) 2022-present n8n GmbH.
With regard to the n8n Software:
This software and associated documentation files (the "Software") may only be used in production, if
you (and any entity that you represent) hold a valid n8n Enterprise license corresponding to your
usage. Subject to the foregoing sentence, you are free to modify this Software and publish patches
to the Software. You agree that n8n and/or its licensors (as applicable) retain all right, title and
interest in and to all such modifications and/or patches, and all such modifications and/or patches
may only be used, copied, modified, displayed, distributed, or otherwise exploited with a valid n8n
Enterprise license for the corresponding usage. Notwithstanding the foregoing, you may copy and
modify the Software for development and testing purposes, without requiring a subscription. You
agree that n8n and/or its licensors (as applicable) retain all right, title and interest in and to
all such modifications. You are not granted any other rights beyond what is expressly stated herein.
Subject to the foregoing, it is forbidden to copy, merge, publish, distribute, sublicense, and/or
sell the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
For all third party components incorporated into the n8n Software, those components are licensed
under the original license provided by the owner of the applicable component.

72
n8n-n8n-1.109.2/README.md Executable file
View File

@@ -0,0 +1,72 @@
![Banner image](https://user-images.githubusercontent.com/10284570/173569848-c624317f-42b1-45a6-ab09-f0ea3c247648.png)
# n8n - Secure Workflow Automation for Technical Teams
n8n is a workflow automation platform that gives technical teams the flexibility of code with the speed of no-code. With 400+ integrations, native AI capabilities, and a fair-code license, n8n lets you build powerful automations while maintaining full control over your data and deployments.
![n8n.io - Screenshot](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot-readme.png)
## Key Capabilities
- **Code When You Need It**: Write JavaScript/Python, add npm packages, or use the visual interface
- **AI-Native Platform**: Build AI agent workflows based on LangChain with your own data and models
- **Full Control**: Self-host with our fair-code license or use our [cloud offering](https://app.n8n.cloud/login)
- **Enterprise-Ready**: Advanced permissions, SSO, and air-gapped deployments
- **Active Community**: 400+ integrations and 900+ ready-to-use [templates](https://n8n.io/workflows)
## Quick Start
Try n8n instantly with [npx](https://docs.n8n.io/hosting/installation/npm/) (requires [Node.js](https://nodejs.org/en/)):
```
npx n8n
```
Or deploy with [Docker](https://docs.n8n.io/hosting/installation/docker/):
```
docker volume create n8n_data
docker run -it --rm --name n8n -p 5678:5678 -v n8n_data:/home/node/.n8n docker.n8n.io/n8nio/n8n
```
Access the editor at http://localhost:5678
## Resources
- 📚 [Documentation](https://docs.n8n.io)
- 🔧 [400+ Integrations](https://n8n.io/integrations)
- 💡 [Example Workflows](https://n8n.io/workflows)
- 🤖 [AI & LangChain Guide](https://docs.n8n.io/langchain/)
- 👥 [Community Forum](https://community.n8n.io)
- 📖 [Community Tutorials](https://community.n8n.io/c/tutorials/28)
## Support
Need help? Our community forum is the place to get support and connect with other users:
[community.n8n.io](https://community.n8n.io)
## License
n8n is [fair-code](https://faircode.io) distributed under the [Sustainable Use License](https://github.com/n8n-io/n8n/blob/master/LICENSE.md) and [n8n Enterprise License](https://github.com/n8n-io/n8n/blob/master/LICENSE_EE.md).
- **Source Available**: Always visible source code
- **Self-Hostable**: Deploy anywhere
- **Extensible**: Add your own nodes and functionality
[Enterprise licenses](mailto:license@n8n.io) available for additional features and support.
Additional information about the license model can be found in the [docs](https://docs.n8n.io/reference/license/).
## Contributing
Found a bug 🐛 or have a feature idea ✨? Check our [Contributing Guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md) to get started.
## Join the Team
Want to shape the future of automation? Check out our [job posts](https://n8n.io/careers) and join our team!
## What does n8n mean?
**Short answer:** It means "nodemation" and is pronounced as n-eight-n.
**Long answer:** "I get that question quite often (more often than I expected) so I decided it is probably best to answer it here. While looking for a good name for the project with a free domain I realized very quickly that all the good ones I could think of were already taken. So, in the end, I chose nodemation. 'node-' in the sense that it uses a Node-View and that it uses Node.js and '-mation' for 'automation' which is what the project is supposed to help with. However, I did not like how long the name was and I could not imagine writing something that long every time in the CLI. That is when I then ended up on 'n8n'." - **Jan Oberhauser, Founder and CEO, n8n.io**

4
n8n-n8n-1.109.2/SECURITY.md Executable file
View File

@@ -0,0 +1,4 @@
## Reporting a Vulnerability
Please report (suspected) security vulnerabilities to **[security@n8n.io](mailto:security@n8n.io)**. You will receive a response from
us within 48 hours. If the issue is confirmed, we will release a patch as soon as possible, depending on complexity, but historically within a few days.

Binary file not shown. (Size: 7.7 KiB)
Binary file not shown. (Size: 98 KiB)
Binary file not shown. (Size: 224 KiB)

54
n8n-n8n-1.109.2/biome.jsonc Executable file
View File

@@ -0,0 +1,54 @@
{
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
"vcs": {
"clientKind": "git",
"enabled": true,
"useIgnoreFile": true
},
"files": {
"ignore": [
"**/.turbo",
"**/components.d.ts",
"**/coverage",
"**/dist",
"**/package.json",
"**/pnpm-lock.yaml",
"**/CHANGELOG.md",
"**/cl100k_base.json",
"**/o200k_base.json"
]
},
"formatter": {
"enabled": true,
"formatWithErrors": false,
"indentStyle": "tab",
"indentWidth": 2,
"lineEnding": "lf",
"lineWidth": 100,
"attributePosition": "auto",
"ignore": [
// Handled by prettier
"**/*.vue"
]
},
"organizeImports": { "enabled": false },
"linter": {
"enabled": false
},
"javascript": {
"parser": {
"unsafeParameterDecoratorsEnabled": true
},
"formatter": {
"jsxQuoteStyle": "double",
"quoteProperties": "asNeeded",
"trailingCommas": "all",
"semicolons": "always",
"arrowParentheses": "always",
"bracketSpacing": true,
"bracketSameLine": false,
"quoteStyle": "single",
"attributePosition": "auto"
}
}
}

69
n8n-n8n-1.109.2/codecov.yml Executable file
View File

@@ -0,0 +1,69 @@
codecov:
max_report_age: off
require_ci_to_pass: true
coverage:
status:
patch: false
project:
default:
threshold: 0.5
github_checks:
annotations: false
flags:
tests:
paths:
- '**'
carryforward: true
component_management:
default_rules:
statuses:
- type: project
target: auto
branches:
- '!master'
individual_components:
- component_id: backend_packages
name: Backend
paths:
- packages/@n8n/ai-workflow-builder.ee/**
- packages/@n8n/api-types/**
- packages/@n8n/config/**
- packages/@n8n/client-oauth2/**
- packages/@n8n/decorators/**
- packages/@n8n/constants/**
- packages/@n8n/backend-common/**
- packages/@n8n/backend-test-utils/**
- packages/@n8n/errors/**
- packages/@n8n/db/**
- packages/@n8n/di/**
- packages/@n8n/imap/**
- packages/@n8n/permissions/**
- packages/@n8n/task-runner/**
- packages/workflow/**
- packages/core/**
- packages/cli/**
- component_id: frontend_packages
name: Frontend
paths:
- packages/@n8n/codemirror-lang/**
- packages/frontend/**
- component_id: nodes_packages
name: Nodes
paths:
- packages/node-dev/**
- packages/nodes-base/**
- packages/@n8n/json-schema-to-zod/**
- packages/@n8n/nodes-langchain/**
statuses:
- type: project
target: auto
threshold: 0% # Enforce: Coverage must not decrease
ignore:
- (?s:.*/[^\/]*\.spec\.ts.*)\Z
- (?s:.*/[^\/]*\.test\.ts.*)\Z
- (?s:.*/[^\/]*e2e[^\/]*\.ts.*)\Z

3
n8n-n8n-1.109.2/cypress/.gitignore vendored Executable file
View File

@@ -0,0 +1,3 @@
videos/
screenshots/
downloads/

View File

@@ -0,0 +1,32 @@
## Debugging Flaky End-to-End Tests - Usage
To debug flaky end-to-end (E2E) tests, use the following command:
```bash
pnpm run debug:flaky:e2e -- <grep_filter> <burn_count>
```
**Parameters:**
* `<grep_filter>`: (Optional) A string to filter tests by their `it()` or `describe()` block titles, or by tags if using the `@cypress/grep` plugin. If omitted, all tests will be run.
* `<burn_count>`: (Optional) The number of times to run the filtered tests. Defaults to 5 if not provided.
**Examples:**
1. **Run all tests tagged with `CAT-726` ten times:**
```bash
pnpm run debug:flaky:e2e CAT-726 10
```
2. **Run all tests containing "login" five times (default burn count):**
```bash
pnpm run debug:flaky:e2e login
```
3. **Run all tests five times (default grep and burn count):**
```bash
pnpm run debug:flaky:e2e
```

4
n8n-n8n-1.109.2/cypress/augmentation.d.ts vendored Executable file
View File

@@ -0,0 +1,4 @@
declare module 'cypress-otp' {
// eslint-disable-next-line import-x/no-default-export
export default function generateOTPToken(secret: string): string;
}

View File

@@ -0,0 +1,7 @@
{
"$schema": "../node_modules/@biomejs/biome/configuration_schema.json",
"extends": ["../biome.jsonc"],
"formatter": {
"ignore": ["fixtures/**"]
}
}

View File

@@ -0,0 +1,18 @@
//#region Getters
export const getBecomeTemplateCreatorCta = () => cy.getByTestId('become-template-creator-cta');
export const getCloseBecomeTemplateCreatorCtaButton = () =>
cy.getByTestId('close-become-template-creator-cta');
//#endregion
//#region Actions
export const interceptCtaRequestWithResponse = (becomeCreator: boolean) => {
return cy.intercept('GET', '/rest/cta/become-creator', {
body: becomeCreator,
});
};
//#endregion

View File

@@ -0,0 +1,19 @@
export const universalAddButton = () => cy.getByTestId('universal-add');
export const createResource = (
resourceType: 'project' | 'workflow' | 'credential',
projectName: string,
) => {
universalAddButton().click();
cy.getByTestId('navigation-submenu')
.contains(new RegExp(resourceType, 'i'))
.should('be.visible')
.click();
if (resourceType !== 'project') {
cy.getByTestId('navigation-submenu-item')
.contains(new RegExp(projectName))
.should('be.visible')
.click();
}
};

View File

@@ -0,0 +1,114 @@
export function getCredentialsPageUrl() {
return '/home/credentials';
}
export const verifyCredentialsListPageIsLoaded = () => {
cy.get('[data-test-id="resources-list-wrapper"], [data-test-id="empty-resources-list"]').should(
'be.visible',
);
};
export const loadCredentialsPage = (credentialsPageUrl: string) => {
cy.visit(credentialsPageUrl);
verifyCredentialsListPageIsLoaded();
};
/**
* Getters - Page
*/
export function getEmptyListCreateCredentialButton() {
return cy.getByTestId('empty-resources-list').find('button');
}
export function getCredentialCards() {
return cy.getByTestId('resources-list-item');
}
/**
* Getters - Modal
*/
export function getNewCredentialModal() {
return cy.getByTestId('selectCredential-modal', { timeout: 5000 });
}
export function getEditCredentialModal() {
return cy.getByTestId('editCredential-modal', { timeout: 5000 });
}
export function getNewCredentialTypeSelect() {
return cy.getByTestId('new-credential-type-select');
}
export function getNewCredentialTypeOption(credentialType: string) {
return cy.getByTestId('new-credential-type-select-option').contains(credentialType);
}
export function getNewCredentialTypeButton() {
return cy.getByTestId('new-credential-type-button');
}
export function getCredentialConnectionParameterInputs() {
return cy.getByTestId('credential-connection-parameter');
}
export function getConnectionParameter(fieldName: string) {
return getCredentialConnectionParameterInputs().find(
`:contains('${fieldName}') .n8n-input input`,
);
}
export function getCredentialSaveButton() {
return cy.getByTestId('credential-save-button', { timeout: 5000 });
}
/**
* Actions - Modal
*/
export function setCredentialName(name: string) {
cy.getByTestId('credential-name').find('span[data-test-id=inline-edit-preview]').click();
cy.getByTestId('credential-name').type(name);
}
export function saveCredential() {
getCredentialSaveButton()
.click({ force: true })
.within(() => {
cy.get('button').should('not.exist');
});
getCredentialSaveButton().should('have.text', 'Saved');
}
export function saveCredentialWithWait() {
cy.intercept('POST', '/rest/credentials').as('saveCredential');
saveCredential();
cy.wait('@saveCredential');
getCredentialSaveButton().should('contain.text', 'Saved');
}
export function closeNewCredentialModal() {
getNewCredentialModal().find('.el-dialog__close').first().click();
}
export function createNewCredential(
type: string,
name: string,
parameter: string,
parameterValue: string,
closeModal = true,
) {
getEmptyListCreateCredentialButton().click();
getNewCredentialModal().should('be.visible');
getNewCredentialTypeSelect().should('be.visible');
getNewCredentialTypeOption(type).click();
getNewCredentialTypeButton().click();
getConnectionParameter(parameter).type(parameterValue);
setCredentialName(name);
saveCredential();
if (closeModal) {
getEditCredentialModal().find('.el-dialog__close').first().click();
}
}

View File

@@ -0,0 +1,46 @@
/**
* Getters
*/
export const getExecutionsSidebar = () => cy.getByTestId('executions-sidebar');
export const getWorkflowExecutionPreviewIframe = () => cy.getByTestId('workflow-preview-iframe');
export const getExecutionPreviewBody = () =>
getWorkflowExecutionPreviewIframe()
.its('0.contentDocument.body')
.should((body) => {
expect(body.querySelector('[data-test-id="canvas-wrapper"]')).to.exist;
})
.then((el) => cy.wrap(el));
export const getExecutionPreviewBodyNodes = () =>
getExecutionPreviewBody().findChildByTestId('canvas-node');
export const getExecutionPreviewBodyNodesByName = (name: string) =>
getExecutionPreviewBody().findChildByTestId('canvas-node').filter(`[data-name="${name}"]`).eq(0);
export function getExecutionPreviewOutputPanelRelatedExecutionLink() {
return getExecutionPreviewBody().findChildByTestId('related-execution-link');
}
export function getLogsOverviewStatus() {
return getExecutionPreviewBody().findChildByTestId('logs-overview-status');
}
export function getLogEntries() {
return getExecutionPreviewBody().findChildByTestId('logs-overview-body').find('[role=treeitem]');
}
export function getManualChatMessages() {
return getExecutionPreviewBody().find('.chat-messages-list .chat-message');
}
/**
* Actions
*/
export const openExecutionPreviewNode = (name: string) =>
getExecutionPreviewBodyNodesByName(name).dblclick();
export const toggleAutoRefresh = () => cy.getByTestId('auto-refresh-checkbox').click();

View File

@@ -0,0 +1,12 @@
export const overrideFeatureFlag = (name: string, value: boolean | string) => {
cy.window().then((win) => {
// If feature flags hasn't been initialized yet, we store the override
// in local storage and it gets loaded when the feature flags are
// initialized.
win.localStorage.setItem('N8N_EXPERIMENT_OVERRIDES', JSON.stringify({ [name]: value }));
if (win.featureFlags) {
win.featureFlags.override(name, value);
}
});
};

View File

@@ -0,0 +1,553 @@
import { errorToast, successToast } from '../pages/notifications';
/**
* Getters
*/
export function getPersonalProjectMenuItem() {
return cy.getByTestId('project-personal-menu-item');
}
export function getOverviewMenuItem() {
return cy.getByTestId('menu-item').contains('Overview');
}
export function getAddResourceDropdown() {
return cy.getByTestId('add-resource');
}
export function getFolderCards() {
return cy.getByTestId('folder-card');
}
export function getFolderCard(name: string) {
return cy.getByTestId('folder-card-name').contains(name).closest('[data-test-id="folder-card"]');
}
export function getWorkflowCards() {
return cy.getByTestId('resources-list-item-workflow');
}
export function getWorkflowCard(name: string) {
return cy
.getByTestId('workflow-card-name')
.contains(name)
.closest('[data-test-id="resources-list-item-workflow"]');
}
export function getWorkflowCardActions(name: string) {
return getWorkflowCard(name).find('[data-test-id="workflow-card-actions"]');
}
export function getWorkflowCardActionItem(workflowName: string, actionName: string) {
return getWorkflowCardActions(workflowName)
.find('span[aria-controls]')
.invoke('attr', 'aria-controls')
.then((popperId) => {
return cy.get(`#${popperId}`).find(`[data-test-id="action-${actionName}"]`);
});
}
export function getDuplicateWorkflowModal() {
return cy.getByTestId('duplicate-modal');
}
export function getWorkflowMenu() {
return cy.getByTestId('workflow-menu');
}
export function getAddFolderButton() {
return cy.getByTestId('add-folder-button');
}
export function getListBreadcrumbs() {
return cy.getByTestId('main-breadcrumbs');
}
export function getHomeProjectBreadcrumb() {
return getListBreadcrumbs().findChildByTestId('home-project');
}
export function getListBreadcrumbItem(name: string) {
return getListBreadcrumbs().findChildByTestId('breadcrumbs-item').contains(name);
}
export function getVisibleListBreadcrumbs() {
return getListBreadcrumbs().findChildByTestId('breadcrumbs-item');
}
export function getCurrentBreadcrumb() {
return getListBreadcrumbs().findChildByTestId('breadcrumbs-item-current').find('input');
}
export function getCurrentBreadcrumbText() {
return getCurrentBreadcrumb().invoke('val');
}
export function getMainBreadcrumbsEllipsis() {
return getListBreadcrumbs().findChildByTestId('hidden-items-menu');
}
export function getMainBreadcrumbsEllipsisMenuItems() {
return cy
.getByTestId('hidden-items-menu')
.find('span[aria-controls]')
.invoke('attr', 'aria-controls')
.then((popperId) => {
return cy.get(`#${popperId}`).find('li');
});
}
export function getFolderCardBreadCrumbs(folderName: string) {
return getFolderCard(folderName).find('[data-test-id="folder-card-breadcrumbs"]');
}
export function getFolderCardBreadCrumbsEllipsis(folderName: string) {
return getFolderCardBreadCrumbs(folderName).find('[data-test-id="ellipsis"]');
}
export function getFolderCardHomeProjectBreadcrumb(folderName: string) {
return getFolderCardBreadCrumbs(folderName).find('[data-test-id="folder-card-home-project"]');
}
export function getFolderCardCurrentBreadcrumb(folderName: string) {
return getFolderCardBreadCrumbs(folderName).find('[data-test-id="breadcrumbs-item-current"]');
}
export function getOpenHiddenItemsTooltip() {
return cy.getByTestId('hidden-items-tooltip').filter(':visible');
}
export function getListActionsToggle() {
return cy.getByTestId('folder-breadcrumbs-actions');
}
export function getCanvasBreadcrumbs() {
cy.getByTestId('canvas-breadcrumbs').should('exist');
return cy.getByTestId('canvas-breadcrumbs').findChildByTestId('folder-breadcrumbs');
}
export function getListActionItem(name: string) {
return cy
.getByTestId('folder-breadcrumbs-actions')
.find('span[aria-controls]')
.invoke('attr', 'aria-controls')
.then((popperId) => {
return cy.get(`#${popperId}`).find(`[data-test-id="action-${name}"]`);
});
}
export function getInlineEditInput() {
return cy.getByTestId('inline-edit-input');
}
export function getFolderCardActionToggle(folderName: string) {
return getFolderCard(folderName).find('[data-test-id="folder-card-actions"]');
}
export function getFolderCardActionItem(folderName: string, actionName: string) {
return getFolderCard(folderName)
.findChildByTestId('folder-card-actions')
.filter(':visible')
.find('span[aria-controls]')
.invoke('attr', 'aria-controls')
.then((popperId) => {
return cy.get(`#${popperId}`).find(`[data-test-id="action-${actionName}"]`);
});
}
export function getFolderDeleteModal() {
return cy.getByTestId('deleteFolder-modal');
}
export function getMoveFolderModal() {
return cy.getByTestId('moveFolder-modal');
}
export function getDeleteRadioButton() {
return cy.getByTestId('delete-content-radio');
}
export function getTransferContentRadioButton() {
return cy.getByTestId('transfer-content-radio');
}
export function getConfirmDeleteInput() {
return getFolderDeleteModal().findChildByTestId('delete-data-input').find('input');
}
export function getDeleteFolderModalConfirmButton() {
return getFolderDeleteModal().findChildByTestId('confirm-delete-folder-button');
}
export function getProjectEmptyState() {
return cy.getByTestId('list-empty-state');
}
export function getFolderEmptyState() {
return cy.getByTestId('empty-folder-container');
}
export function getProjectMenuItem(name: string) {
if (name.toLowerCase() === 'personal') {
return getPersonalProjectMenuItem();
}
return cy.getByTestId('project-menu-item').contains(name);
}
export function getMoveToFolderDropdown() {
return cy.getByTestId('move-to-folder-dropdown');
}
export function getMoveToFolderOption(name: string) {
return cy.getByTestId('move-to-folder-option').contains(name);
}
export function getMoveToFolderInput() {
return getMoveToFolderDropdown().find('input');
}
export function getProjectSharingInput() {
return cy.getByTestId('project-sharing-select');
}
export function getProjectSharingOption(name: string) {
return cy.getByTestId('project-sharing-info').contains(name);
}
export function getEmptyFolderDropdownMessage(text: string) {
return cy.get('.el-select-dropdown__empty').contains(text);
}
export function getMoveFolderConfirmButton() {
return cy.getByTestId('confirm-move-folder-button');
}
export function getMoveWorkflowModal() {
return cy.getByTestId('moveFolder-modal');
}
export function getWorkflowCardBreadcrumbs(workflowName: string) {
return getWorkflowCard(workflowName).find('[data-test-id="workflow-card-breadcrumbs"]');
}
export function getWorkflowCardBreadcrumbsEllipsis(workflowName: string) {
return getWorkflowCardBreadcrumbs(workflowName).find('[data-test-id="ellipsis"]');
}
export function getNewFolderNameInput() {
return cy.get('.add-folder-modal').filter(':visible').find('input.el-input__inner');
}
export function getNewFolderModalErrorMessage() {
return cy.get('.el-message-box__errormsg').filter(':visible');
}
export function getProjectTab(tabId: string) {
return cy.getByTestId('project-tabs').find(`#${tabId}`);
}
/**
* Actions
*/
export function goToPersonalProject() {
getPersonalProjectMenuItem().click();
}
export function createFolderInsideFolder(childName: string, parentName: string) {
getFolderCard(parentName).click();
createFolderFromListHeaderButton(childName);
}
export function createFolderFromListHeaderButton(folderName: string) {
getAddFolderButton().click();
createNewFolder(folderName);
}
export function createWorkflowFromEmptyState(workflowName?: string) {
getFolderEmptyState().find('button').contains('Create Workflow').click();
if (workflowName) {
cy.getByTestId('workflow-name-input').type(`{selectAll}{backspace}${workflowName}`, {
delay: 50,
});
}
cy.getByTestId('workflow-save-button').click();
successToast().should('exist');
}
export function createWorkflowFromProjectHeader(folderName?: string, workflowName?: string) {
cy.getByTestId('add-resource-workflow').click();
if (workflowName) {
cy.getByTestId('workflow-name-input').type(`{selectAll}{backspace}${workflowName}`, {
delay: 50,
});
}
cy.getByTestId('workflow-save-button').click();
if (folderName) {
successToast().should(
'contain.text',
`Workflow successfully created in "Personal", within "${folderName}"`,
);
}
}
export function createWorkflowFromListDropdown(workflowName?: string) {
getListActionsToggle().click();
getListActionItem('create_workflow').click();
if (workflowName) {
cy.getByTestId('workflow-name-input').type(`{selectAll}{backspace}${workflowName}`, {
delay: 50,
});
}
cy.getByTestId('workflow-save-button').click();
successToast().should('exist');
}
export function createFolderFromProjectHeader(folderName: string) {
getAddResourceDropdown().click();
cy.getByTestId('action-folder').click();
createNewFolder(folderName);
}
export function createFolderFromListDropdown(folderName: string) {
getListActionsToggle().click();
getListActionItem('create').click();
createNewFolder(folderName);
}
export function createFolderFromCardActions(parentName: string, folderName: string) {
getFolderCardActionToggle(parentName).click();
getFolderCardActionItem(parentName, 'create').click();
createNewFolder(folderName);
}
export function renameFolderFromListActions(folderName: string, newName: string) {
getFolderCard(folderName).click();
getListActionsToggle().click();
getListActionItem('rename').click();
getInlineEditInput().should('be.visible');
getInlineEditInput().type(`${newName}{enter}`, { delay: 50 });
successToast().should('exist');
}
export function renameFolderFromCardActions(folderName: string, newName: string) {
getFolderCardActionToggle(folderName).click();
getFolderCardActionItem(folderName, 'rename').click();
renameFolder(newName);
}
export function duplicateWorkflowFromCardActions(workflowName: string, duplicateName: string) {
getWorkflowCardActions(workflowName).click();
getWorkflowCardActionItem(workflowName, 'duplicate').click();
getDuplicateWorkflowModal().find('input').first().type('{selectall}');
getDuplicateWorkflowModal().find('input').first().type(duplicateName);
getDuplicateWorkflowModal().find('button').contains('Duplicate').click();
errorToast().should('not.exist');
}
export function duplicateWorkflowFromWorkflowPage(duplicateName: string) {
getWorkflowMenu().click();
cy.getByTestId('workflow-menu-item-duplicate').click();
getDuplicateWorkflowModal().find('input').first().type('{selectall}');
getDuplicateWorkflowModal().find('input').first().type(duplicateName);
getDuplicateWorkflowModal().find('button').contains('Duplicate').click();
errorToast().should('not.exist');
}
export function deleteEmptyFolderFromCardDropdown(folderName: string) {
cy.intercept('DELETE', '/rest/projects/**').as('deleteFolder');
getFolderCard(folderName).click();
getListActionsToggle().click();
getListActionItem('delete').click();
cy.wait('@deleteFolder');
successToast().should('contain.text', 'Folder deleted');
}
export function deleteEmptyFolderFromListDropdown(folderName: string) {
cy.intercept('DELETE', '/rest/projects/**').as('deleteFolder');
getFolderCard(folderName).click();
getListActionsToggle().click();
getListActionItem('delete').click();
cy.wait('@deleteFolder');
successToast().should('contain.text', 'Folder deleted');
}
export function deleteFolderWithContentsFromListDropdown(folderName: string) {
getListActionsToggle().click();
getListActionItem('delete').click();
confirmFolderDelete(folderName);
}
export function deleteFolderWithContentsFromCardDropdown(folderName: string) {
getFolderCardActionToggle(folderName).click();
getFolderCardActionItem(folderName, 'delete').click();
confirmFolderDelete(folderName);
}
export function deleteAndTransferFolderContentsFromCardDropdown(
folderName: string,
destinationName: string,
) {
getFolderCardActionToggle(folderName).click();
getFolderCardActionItem(folderName, 'delete').click();
deleteFolderAndMoveContents(folderName, destinationName);
}
export function deleteAndTransferFolderContentsFromListDropdown(destinationName: string) {
getListActionsToggle().click();
getListActionItem('delete').click();
getCurrentBreadcrumbText().then((currentFolderName) => {
deleteFolderAndMoveContents(String(currentFolderName), destinationName);
});
}
export function createNewProject(projectName: string, options: { openAfterCreate?: boolean } = {}) {
cy.getByTestId('universal-add').should('exist').click();
cy.getByTestId('navigation-menu-item').contains('Project').click();
cy.getByTestId('project-settings-name-input').type(projectName, { delay: 50 });
cy.getByTestId('project-settings-save-button').click();
successToast().should('exist');
if (options.openAfterCreate) {
getProjectMenuItem(projectName).click();
}
}
export function moveFolderFromFolderCardActions(folderName: string, destinationName: string) {
getFolderCardActionToggle(folderName).click();
getFolderCardActionItem(folderName, 'move').click();
moveFolder(folderName, destinationName);
}
export function moveFolderFromListActions(folderName: string, destinationName: string) {
getFolderCard(folderName).click();
getListActionsToggle().click();
getListActionItem('move').click();
moveFolder(folderName, destinationName);
}
export function moveWorkflowToFolder(workflowName: string, folderName: string) {
getWorkflowCardActions(workflowName).click();
getWorkflowCardActionItem(workflowName, 'moveToFolder').click();
getMoveFolderModal().should('be.visible');
getMoveToFolderDropdown().click();
getMoveToFolderInput().type(folderName, { delay: 50 });
getMoveToFolderOption(folderName).should('be.visible').click();
getMoveFolderConfirmButton().should('be.enabled').click();
}
export function dragAndDropToFolder(sourceName: string, destinationName: string) {
const draggable = `[data-test-id=draggable]:has([data-resourcename="${sourceName}"])`;
const droppable = `[data-test-id=draggable]:has([data-resourcename="${destinationName}"])`;
cy.get(draggable).trigger('mousedown');
cy.draganddrop(draggable, droppable, { position: 'center' });
}
export function dragAndDropToProjectRoot(sourceName: string) {
const draggable = `[data-test-id=draggable]:has([data-resourcename="${sourceName}"])`;
const droppable = '[data-test-id="home-project"]';
cy.get(draggable).trigger('mousedown');
cy.draganddrop(draggable, droppable, { position: 'center' });
}
/**
* Utils
*/
/**
* Types folder name in the prompt and waits for the folder to be created
* @param name
*/
function createNewFolder(name: string) {
cy.intercept('POST', '/rest/projects/**').as('createFolder');
cy.get('[role=dialog]')
.filter(':visible')
.within(() => {
cy.get('input.el-input__inner').type(name, { delay: 50 });
cy.get('button.btn--confirm').click();
});
cy.wait('@createFolder');
successToast().should('exist');
}
function renameFolder(newName: string) {
cy.intercept('PATCH', '/rest/projects/**').as('renameFolder');
cy.get('[role=dialog]')
.filter(':visible')
.within(() => {
cy.get('input.el-input__inner').type('{selectall}');
cy.get('input.el-input__inner').type(newName, { delay: 50 });
cy.get('button.btn--confirm').click();
});
cy.wait('@renameFolder');
successToast().should('exist');
}
function confirmFolderDelete(folderName: string) {
cy.intercept('DELETE', '/rest/projects/**').as('deleteFolder');
getFolderDeleteModal().should('be.visible');
getDeleteRadioButton().click();
getConfirmDeleteInput().should('be.visible');
getConfirmDeleteInput().type(`delete ${folderName}`, { delay: 50 });
getDeleteFolderModalConfirmButton().should('be.enabled').click();
cy.wait('@deleteFolder');
successToast().contains('Folder deleted').should('exist');
}
function deleteFolderAndMoveContents(folderName: string, destinationName: string) {
cy.intercept('DELETE', '/rest/projects/**').as('deleteFolder');
getFolderDeleteModal().should('be.visible');
getFolderDeleteModal().find('h1').first().contains(`Delete "${folderName}"`);
getTransferContentRadioButton().should('be.visible').click();
getMoveToFolderDropdown().click();
getMoveToFolderInput().type(destinationName);
getMoveToFolderOption(destinationName).click();
getDeleteFolderModalConfirmButton().should('be.enabled').click();
cy.wait('@deleteFolder');
successToast().should('contain.text', `Data transferred to "${destinationName}"`);
}
function moveFolder(folderName: string, destinationName: string) {
cy.intercept('PATCH', '/rest/projects/**').as('moveFolder');
getMoveFolderModal().should('be.visible');
getMoveFolderModal().find('h1').first().contains(`Move folder ${folderName}`);
// The dropdown receives focus after a short delay (once the modal's slide-in animation has finished).
// The component listens for an event for this; in the test a fixed wait is predictable enough.
cy.wait(500);
// Type the current folder's name into the auto-focused dropdown.
// This also verifies that auto-focus worked as expected.
cy.focused().type(folderName, { delay: 50 });
// The current folder must not be offered as a destination
getEmptyFolderDropdownMessage('No folders found').should('exist');
// Select destination folder
getMoveToFolderInput().type(`{selectall}{backspace}${destinationName}`, {
delay: 50,
});
getMoveToFolderOption(destinationName).should('be.visible').click();
getMoveFolderConfirmButton().should('be.enabled').click();
cy.wait('@moveFolder');
}
export function transferWorkflow(
workflowName: string,
projectName: string,
destinationFolder?: string,
) {
getMoveFolderModal().should('be.visible');
getMoveFolderModal().find('h1').first().contains(`Move workflow ${workflowName}`);
cy.wait(500);
getProjectSharingInput().should('be.visible').click();
cy.focused().type(projectName, { delay: 50 });
getProjectSharingOption(projectName).should('be.visible').click();
if (destinationFolder) {
getMoveToFolderInput().click();
// Select destination folder
cy.focused().type(destinationFolder, { delay: 50 });
getMoveToFolderOption(destinationFolder).should('be.visible').click();
}
getMoveFolderConfirmButton().should('be.enabled').click();
}
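// Usage sketch (names are hypothetical; the destination folder is optional):
//   transferWorkflow('My workflow', 'Marketing project', 'Campaigns');
// When no destination folder is passed, only the target project is selected before confirming.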

View File

@@ -0,0 +1,91 @@
/**
* Accessors
*/
export function getOverviewPanel() {
return cy.getByTestId('logs-overview');
}
export function getOverviewPanelBody() {
return cy.getByTestId('logs-overview-body');
}
export function getOverviewStatus() {
return cy.getByTestId('logs-overview-status');
}
export function getLogEntries() {
return cy.getByTestId('logs-overview-body').find('[role=treeitem]');
}
export function getSelectedLogEntry() {
return cy.getByTestId('logs-overview-body').find('[role=treeitem][aria-selected=true]');
}
export function getInputPanel() {
return cy.getByTestId('log-details-input');
}
export function getInputTableRows() {
return cy.getByTestId('log-details-input').find('table tr');
}
export function getInputTbodyCell(row: number, col: number) {
return cy.getByTestId('log-details-input').find('table tr').eq(row).find('td').eq(col);
}
export function getNodeErrorMessageHeader() {
return cy.getByTestId('log-details-output').findChildByTestId('node-error-message');
}
export function getOutputPanel() {
return cy.getByTestId('log-details-output');
}
export function getOutputTableRows() {
return cy.getByTestId('log-details-output').find('table tr');
}
export function getOutputTbodyCell(row: number, col: number) {
return cy.getByTestId('log-details-output').find('table tr').eq(row).find('td').eq(col);
}
/**
* Actions
*/
export function openLogsPanel() {
cy.getByTestId('logs-overview-header').click();
}
export function pressClearExecutionButton() {
cy.getByTestId('logs-overview-header').find('button').contains('Clear execution').click();
}
export function clickLogEntryAtRow(rowIndex: number) {
getLogEntries().eq(rowIndex).click();
}
export function toggleInputPanel() {
cy.getByTestId('log-details-header').contains('Input').click();
}
export function clickOpenNdvAtRow(rowIndex: number) {
getLogEntries().eq(rowIndex).realHover();
getLogEntries().eq(rowIndex).find('[aria-label="Open..."]').click();
}
export function clickTriggerPartialExecutionAtRow(rowIndex: number) {
getLogEntries().eq(rowIndex).realHover();
getLogEntries().eq(rowIndex).find('[aria-label="Execute step"]').click();
}
export function setInputDisplayMode(mode: 'table' | 'ai' | 'json' | 'schema') {
cy.getByTestId('log-details-input').realHover();
cy.getByTestId('log-details-input').findChildByTestId(`radio-button-${mode}`).click();
}
export function setOutputDisplayMode(mode: 'table' | 'ai' | 'json' | 'schema') {
cy.getByTestId('log-details-output').realHover();
cy.getByTestId('log-details-output').findChildByTestId(`radio-button-${mode}`).click();
}
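// Typical flow in a spec (row index and display mode are illustrative):
//   openLogsPanel();
//   getLogEntries().should('have.length.at.least', 1);
//   clickLogEntryAtRow(0);
//   setOutputDisplayMode('json');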

View File

@@ -0,0 +1,40 @@
/**
* Getters
*/
export function getManualChatModal() {
return cy.getByTestId('canvas-chat');
}
export function getManualChatInput() {
return getManualChatModal().get('.chat-inputs textarea');
}
export function getManualChatSendButton() {
return getManualChatModal().get('.chat-input-send-button');
}
export function getManualChatMessages() {
return getManualChatModal().get('.chat-messages-list .chat-message');
}
export function getManualChatModalCloseButton() {
return cy.getByTestId('workflow-chat-button');
}
export function getManualChatDialog() {
return getManualChatModal().getByTestId('workflow-lm-chat-dialog');
}
/**
* Actions
*/
export function sendManualChatMessage(message: string) {
getManualChatInput().type(message);
getManualChatSendButton().click();
}
export function closeManualChatModal() {
getManualChatModalCloseButton().click();
}
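// Usage sketch (the message text is illustrative):
//   sendManualChatMessage('Hello agent');
//   getManualChatMessages().should('have.length.at.least', 1);
//   closeManualChatModal();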

View File

@@ -0,0 +1,62 @@
import { clearNotifications } from '../../pages/notifications';
/**
* Getters
*/
export function getCredentialConnectionParameterInputs() {
return cy.getByTestId('credential-connection-parameter');
}
export function getCredentialConnectionParameterInputByName(name: string) {
return cy.getByTestId(`parameter-input-${name}`);
}
export function getEditCredentialModal() {
return cy.getByTestId('editCredential-modal', { timeout: 5000 });
}
export function getCredentialSaveButton() {
return cy.getByTestId('credential-save-button', { timeout: 5000 });
}
export function getCredentialDeleteButton() {
return cy.getByTestId('credential-delete-button');
}
export function getCredentialModalCloseButton() {
return getEditCredentialModal().find('.el-dialog__close').first();
}
/**
* Actions
*/
export function setCredentialConnectionParameterInputByName(name: string, value: string) {
getCredentialConnectionParameterInputByName(name).type(value);
}
export function saveCredential() {
getCredentialSaveButton()
.click({ force: true })
.within(() => {
cy.get('button').should('not.exist');
});
getCredentialSaveButton().should('have.text', 'Saved');
}
export function closeCredentialModal() {
getCredentialModalCloseButton().click();
}
export function setCredentialValues(values: Record<string, string>, save = true) {
Object.entries(values).forEach(([key, value]) => {
setCredentialConnectionParameterInputByName(key, value);
});
if (save) {
saveCredential();
closeCredentialModal();
clearNotifications();
}
}
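// Usage sketch (the field name and value are hypothetical and depend on the credential type):
//   setCredentialValues({ apiKey: 'test-key-123' });
// With the default `save = true` this also saves the credential, closes the modal,
// and clears notifications so the success toast does not cover other elements.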

View File

@@ -0,0 +1,13 @@
export function getSaveChangesModal() {
return cy.get('.el-overlay').contains('Save changes before leaving?');
}
// This is the button next to 'Save changes'
export function getCancelSaveChangesButton() {
return cy.get('.btn--cancel');
}
// This is the top right 'x'
export function getCloseSaveChangesButton() {
return cy.get('.el-message-box__headerbtn');
}

View File

@@ -0,0 +1,13 @@
/**
* Getters
*/
export const getWorkflowCredentialsModal = () => cy.getByTestId('setup-workflow-credentials-modal');
export const getContinueButton = () => cy.getByTestId('continue-button');
/**
* Actions
*/
export const closeModalFromContinueButton = () => getContinueButton().click();

View File

@@ -0,0 +1,347 @@
import { getVisiblePopper, getVisibleSelect } from '../utils/popper';
/**
* Getters
*/
export function getNdvContainer() {
return cy.getByTestId('ndv');
}
export function getCredentialSelect(eq = 0) {
return cy.getByTestId('node-credentials-select').eq(eq);
}
export function getCreateNewCredentialOption() {
return cy.getByTestId('node-credentials-select-item-new');
}
export function getBackToCanvasButton() {
return cy.getByTestId('back-to-canvas');
}
export function getExecuteNodeButton() {
return cy.getByTestId('node-execute-button');
}
export function getParameterInputByName(name: string) {
return cy.getByTestId(`parameter-input-${name}`);
}
export function getInputPanel() {
return cy.getByTestId('ndv-input-panel');
}
export function getInputSelect() {
return cy.getByTestId('ndv-input-select').find('input');
}
export function getInputLinkRun() {
return getInputPanel().findChildByTestId('link-run');
}
export function getMainPanel() {
return cy.getByTestId('node-parameters');
}
export function getOutputPanel() {
return cy.getByTestId('output-panel');
}
export function getFixedCollection(collectionName: string) {
return cy.getByTestId(`fixed-collection-${collectionName}`);
}
export function getResourceLocator(paramName: string) {
return cy.getByTestId(`resource-locator-${paramName}`);
}
export function getResourceLocatorInput(paramName: string) {
return getResourceLocator(paramName).find('[data-test-id="rlc-input-container"]');
}
export function getInputPanelDataContainer() {
return getInputPanel().findChildByTestId('ndv-data-container');
}
export function getInputTableRows() {
return getInputPanelDataContainer().find('table tr');
}
export function getInputTbodyCell(row: number, col: number) {
return getInputTableRows().eq(row).find('td').eq(col);
}
export function getInputRunSelector() {
return cy.get('[data-test-id="ndv-input-panel"] [data-test-id="run-selector"]');
}
export function getInputPanelItemsCount() {
return getInputPanel().getByTestId('ndv-items-count');
}
export function getOutputPanelDataContainer() {
return getOutputPanel().findChildByTestId('ndv-data-container');
}
export function getOutputTableRows() {
return getOutputPanelDataContainer().find('table tr');
}
export function getOutputTableRow(row: number) {
return getOutputTableRows().eq(row);
}
export function getOutputTableHeaders() {
return getOutputPanelDataContainer().find('table thead th');
}
export function getOutputTableHeaderByText(text: string) {
return getOutputTableHeaders().contains(text);
}
export function getOutputTbodyCell(row: number, col: number) {
return getOutputTableRows().eq(row).find('td').eq(col);
}
export function getOutputRunSelector() {
return cy.get('[data-test-id="output-panel"] [data-test-id="run-selector"]');
}
export function getOutputRunSelectorInput() {
return getOutputRunSelector().find('input');
}
export function getOutputPanelTable() {
return getOutputPanelDataContainer().get('table');
}
export function getRunDataInfoCallout() {
return cy.getByTestId('run-data-callout');
}
export function getOutputPanelItemsCount() {
return getOutputPanel().getByTestId('ndv-items-count');
}
export function getOutputPanelRelatedExecutionLink() {
return getOutputPanel().getByTestId('related-execution-link');
}
export function getNodeOutputHint() {
return cy.getByTestId('ndv-output-run-node-hint');
}
export function getWorkflowCards() {
return cy.getByTestId('resources-list-item-workflow');
}
export function getWorkflowCard(workflowName: string) {
return getWorkflowCards()
.contains(workflowName)
.parents('[data-test-id="resources-list-item-workflow"]');
}
export function getWorkflowCardContent(workflowName: string) {
return getWorkflowCard(workflowName).findChildByTestId('card-content');
}
export function getNodeRunInfoStale() {
return cy.getByTestId('node-run-info-stale');
}
export function getNodeOutputErrorMessage() {
return getOutputPanel().findChildByTestId('node-error-message');
}
export function getParameterExpressionPreviewValue() {
return cy.getByTestId('parameter-expression-preview-value');
}
/**
* Actions
*/
export function openCredentialSelect(eq = 0) {
getCredentialSelect(eq).click();
}
export function setCredentialByName(name: string) {
openCredentialSelect();
getCredentialSelect().contains(name).click();
}
export function clickCreateNewCredential() {
openCredentialSelect();
getCreateNewCredentialOption().click({ force: true });
}
export function clickGetBackToCanvas() {
getBackToCanvasButton().click();
}
export function clickExecuteNode() {
getExecuteNodeButton().click();
}
export function clickResourceLocatorInput(paramName: string) {
getResourceLocatorInput(paramName).click();
}
export function setParameterInputByName(name: string, value: string) {
getParameterInputByName(name).clear().type(value);
}
export function checkParameterCheckboxInputByName(name: string) {
getParameterInputByName(name).find('input[type="checkbox"]').check({ force: true });
}
export function uncheckParameterCheckboxInputByName(name: string) {
getParameterInputByName(name).find('input[type="checkbox"]').uncheck({ force: true });
}
export function setParameterSelectByContent(name: string, content: string) {
getParameterInputByName(name).realClick();
getVisibleSelect().find('.option-headline').contains(content).click();
}
export function changeOutputRunSelector(runName: string) {
getOutputRunSelector().click();
getVisibleSelect().find('.el-select-dropdown__item').contains(runName).click();
}
export function addItemToFixedCollection(collectionName: string) {
getFixedCollection(collectionName).getByTestId('fixed-collection-add').click();
}
export function typeIntoFixedCollectionItem(collectionName: string, index: number, value: string) {
getFixedCollection(collectionName).within(() =>
cy.getByTestId('parameter-input').eq(index).type(value),
);
}
export function selectResourceLocatorAddResourceItem(
resourceLocator: string,
expectedText: string,
) {
clickResourceLocatorInput(resourceLocator);
// getVisiblePopper().findChildByTestId('rlc-item-add-resource').eq(0).should('exist');
getVisiblePopper()
.findChildByTestId('rlc-item-add-resource')
.eq(0)
.find('span')
.should('contain.text', expectedText)
.click();
}
export function selectResourceLocatorItem(
resourceLocator: string,
index: number,
expectedText: string,
) {
clickResourceLocatorInput(resourceLocator);
getVisiblePopper().findChildByTestId('rlc-item').eq(0).should('exist');
getVisiblePopper()
.findChildByTestId('rlc-item')
.eq(index)
.find('span')
.should('contain.text', expectedText)
.click();
}
export function clickWorkflowCardContent(workflowName: string) {
getWorkflowCardContent(workflowName).click();
}
export function clickAssignmentCollectionAdd() {
cy.getByTestId('assignment-collection-drop-area').click();
}
export function assertNodeOutputHintExists() {
getNodeOutputHint().should('exist');
}
export function assertNodeOutputErrorMessageExists() {
return getNodeOutputErrorMessage().should('exist');
}
// Note that this only validates that the expectedContent is *included* in the output table
export function assertOutputTableContent(expectedContent: unknown[][]) {
for (const [i, row] of expectedContent.entries()) {
for (const [j, value] of row.entries()) {
// + 1 to skip header
getOutputTbodyCell(1 + i, j).should('have.text', value);
}
}
}
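// Usage sketch: checks the first two body rows cell by cell (the header row is skipped; data is illustrative):
//   assertOutputTableContent([
//     ['1', 'Alice'],
//     ['2', 'Bob'],
//   ]);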
export function populateMapperFields(fields: ReadonlyArray<[string, string]>) {
for (const [name, value] of fields) {
getParameterInputByName(name).type(value);
// Click on a parent to dismiss the pop up which hides the field below.
getParameterInputByName(name).parent().parent().parent().parent().click('topLeft');
}
}
/**
* Populates multiValue fixedCollections. Only supports fixedCollections whose fields can all be set via keyboard typing.
*
* @param items - 2D array of items to populate, e.g. [["myField1", "String"], ["myField2", "Number"]]
* @param collectionName - name of the fixedCollection to populate
* @param offset - number of 'parameter-input's before the collection starts, e.g. from a controlling dropdown that makes the fields appear
*/
export function populateFixedCollection<T extends readonly string[]>(
items: readonly T[],
collectionName: string,
offset: number = 0,
) {
if (items.length === 0) return;
const n = items[0].length;
for (const [i, params] of items.entries()) {
addItemToFixedCollection(collectionName);
for (const [j, param] of params.entries()) {
getFixedCollection(collectionName)
.getByTestId('parameter-input')
.eq(offset + i * n + j)
.type(`${param}{downArrow}{enter}`);
}
}
}
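// Usage sketch matching the JSDoc above (collection name, field values and offset are hypothetical):
//   populateFixedCollection([['myField1', 'String'], ['myField2', 'Number']], 'values', 1);
// Each inner array fills one collection item; `{downArrow}{enter}` confirms dropdown-backed fields.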
export function assertInlineExpressionValid() {
cy.getByTestId('inline-expression-editor-input').find('.cm-valid-resolvable').should('exist');
}
export function hoverInputItemByText(text: string) {
return getInputPanelDataContainer().contains(text).realHover();
}
export function verifyInputHoverState(expectedText: string) {
getInputPanelDataContainer()
.find('[data-test-id="hovering-item"]')
.should('be.visible')
.should('have.text', expectedText);
}
export function verifyOutputHoverState(expectedText: string) {
getOutputPanelDataContainer()
.find('[data-test-id="hovering-item"]')
.should('be.visible')
.should('have.text', expectedText);
}
export function resetHoverState() {
getBackToCanvasButton().realHover();
}
export function setInputDisplayMode(mode: 'Schema' | 'Table' | 'JSON' | 'Binary') {
getInputPanel().findChildByTestId('ndv-run-data-display-mode').contains(mode).click();
}
export function toggleInputRunLinking() {
getInputLinkRun().click();
}
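// Usage sketch (the display mode literals are the accepted values; the run label is illustrative):
//   setInputDisplayMode('Table');
//   toggleInputRunLinking();
//   changeOutputRunSelector('2 of 2 (6 items)');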

View File

@@ -0,0 +1,31 @@
// Getters
export const nodeCreatorPlusButton = () => cy.getByTestId('node-creator-plus-button');
export const canvasAddButton = () => cy.getByTestId('canvas-add-button');
export const searchBar = () => cy.getByTestId('search-bar');
export const getCategoryItem = (label: string) => cy.get(`[data-keyboard-nav-id="${label}"]`);
export const getCreatorItem = (label: string) =>
getCreatorItems().contains(label).parents('[data-test-id="item-iterator-item"]');
export const getNthCreatorItem = (n: number) => getCreatorItems().eq(n);
export const nodeCreator = () => cy.getByTestId('node-creator');
export const nodeCreatorTabs = () => cy.getByTestId('node-creator-type-selector');
export const selectedTab = () => nodeCreatorTabs().find('.is-active');
export const categorizedItems = () => cy.getByTestId('categorized-items');
export const getCreatorItems = () => cy.getByTestId('item-iterator-item');
export const categoryItem = () => cy.getByTestId('node-creator-category-item');
export const communityNodeTooltip = () => cy.getByTestId('node-item-community-tooltip');
export const noResults = () => cy.getByTestId('node-creator-no-results');
export const nodeItemName = () => cy.getByTestId('node-creator-item-name');
export const nodeItemDescription = () => cy.getByTestId('node-creator-item-description');
export const activeSubcategory = () => cy.getByTestId('nodes-list-header');
export const expandedCategories = () =>
getCreatorItems().find('>div').filter('.active').invoke('text');
// Actions
export const openNodeCreator = () => {
nodeCreatorPlusButton().click();
nodeCreator().should('be.visible');
};
export const selectNode = (displayName: string) => {
getCreatorItem(displayName).click();
};
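// Usage sketch (the node display name is illustrative):
//   openNodeCreator();
//   searchBar().type('HTTP Request');
//   selectNode('HTTP Request');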

View File

@@ -0,0 +1,95 @@
import { CredentialsModal, WorkflowPage } from '../pages';
import { getVisibleSelect } from '../utils';
const workflowPage = new WorkflowPage();
const credentialsModal = new CredentialsModal();
export const getHomeButton = () => cy.getByTestId('project-home-menu-item');
export const getPersonalProjectsButton = () => cy.getByTestId('project-personal-menu-item');
export const getMenuItems = () => cy.getByTestId('project-menu-item');
export const getAddProjectButton = () => {
cy.getByTestId('universal-add').should('be.visible').click();
cy.getByTestId('universal-add')
.find('.el-sub-menu__title')
.as('menuitem')
.should('have.attr', 'aria-describedby');
cy.get('@menuitem')
.invoke('attr', 'aria-describedby')
.then((el) => cy.get(`[id="${el}"]`))
.as('submenu');
cy.get('@submenu').within((submenu) =>
cy
.wrap(submenu)
.getByTestId('navigation-menu-item')
.should('be.visible')
.filter(':contains("Project")')
.as('button'),
);
return cy.get('@button');
};
export const getAddFirstProjectButton = () => cy.getByTestId('add-first-project-button');
export const getIconPickerButton = () => cy.getByTestId('icon-picker-button');
export const getIconPickerTab = (tab: string) => cy.getByTestId('icon-picker-tabs').contains(tab);
export const getIconPickerIcons = () => cy.getByTestId('icon-picker-icon');
export const getIconPickerEmojis = () => cy.getByTestId('icon-picker-emoji');
// export const getAddProjectButton = () =>
// cy.getByTestId('universal-add').should('contain', 'Add project').should('be.visible');
export const getProjectTabs = () => cy.getByTestId('project-tabs').find('a');
export const getProjectTabWorkflows = () => getProjectTabs().filter('a[href$="/workflows"]');
export const getProjectTabCredentials = () => getProjectTabs().filter('a[href$="/credentials"]');
export const getProjectTabExecutions = () => getProjectTabs().filter('a[href$="/executions"]');
export const getProjectTabSettings = () => getProjectTabs().filter('a[href$="/settings"]');
export const getProjectSettingsNameInput = () =>
cy.getByTestId('project-settings-name-input').find('input');
export const getProjectSettingsSaveButton = () => cy.getByTestId('project-settings-save-button');
export const getProjectSettingsCancelButton = () =>
cy.getByTestId('project-settings-cancel-button');
export const getProjectSettingsDeleteButton = () =>
cy.getByTestId('project-settings-delete-button');
export const getProjectMembersSelect = () => cy.getByTestId('project-members-select');
export const addProjectMember = (email: string, role?: string) => {
getProjectMembersSelect().click();
getProjectMembersSelect().get('.el-select-dropdown__item').contains(email.toLowerCase()).click();
if (role) {
cy.getByTestId(`user-list-item-${email}`)
.find('[data-test-id="projects-settings-user-role-select"]')
.click();
getVisibleSelect().find('li').contains(role).click();
}
};
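// Usage sketch (email and role are hypothetical; the role must match one of the options in the dropdown):
//   addProjectMember('member@example.com', 'Admin');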
export const getResourceMoveModal = () => cy.getByTestId('project-move-resource-modal');
export const getProjectMoveSelect = () => cy.getByTestId('project-move-resource-modal-select');
export const getProjectSharingSelect = () => cy.getByTestId('project-sharing-select');
export const getMoveToFolderSelect = () => cy.getByTestId('move-to-folder-dropdown');
export function createProject(name: string) {
getAddProjectButton().click();
getProjectSettingsNameInput().should('be.visible').clear().type(name);
getProjectSettingsSaveButton().click();
}
export function createWorkflow(fixtureKey: string, name: string) {
workflowPage.getters.workflowImportInput().selectFile(`fixtures/${fixtureKey}`, { force: true });
workflowPage.actions.setWorkflowName(name);
workflowPage.getters.saveButton().should('contain', 'Saved');
workflowPage.actions.zoomToFit();
}
export function createCredential(name: string, closeModal = true) {
credentialsModal.getters.newCredentialModal().should('be.visible');
credentialsModal.getters.newCredentialTypeSelect().should('be.visible');
credentialsModal.getters.newCredentialTypeOption('Notion API').click();
credentialsModal.getters.newCredentialTypeButton().click();
credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890');
credentialsModal.actions.setName(name);
credentialsModal.actions.save();
if (closeModal) {
credentialsModal.actions.close();
}
}
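// Usage sketch (project, fixture and credential names are illustrative; the fixture file must exist):
//   createProject('Team A');
//   createWorkflow('Test_workflow_1.json', 'Imported workflow');
//   createCredential('Notion account');
// Note: createCredential assumes the new-credential modal has already been opened elsewhere in the spec.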

View File

@@ -0,0 +1,14 @@
/**
* Getters
*/
export const getFormStep = () => cy.getByTestId('setup-credentials-form-step');
export const getStepHeading = ($el: JQuery<HTMLElement>) =>
cy.wrap($el).findChildByTestId('credential-step-heading');
export const getStepDescription = ($el: JQuery<HTMLElement>) =>
cy.wrap($el).findChildByTestId('credential-step-description');
export const getCreateAppCredentialsButton = (appName: string) =>
cy.get(`button:contains("Create new ${appName} credential")`);

View File

@@ -0,0 +1,5 @@
/**
* Getters
*/
export const getSetupWorkflowCredentialsButton = () => cy.get('button:contains("Set up template")');

View File

@@ -0,0 +1,40 @@
import { MainSidebar } from '../pages/sidebar/main-sidebar';
const mainSidebar = new MainSidebar();
/**
* Getters
*/
export function getVersionUpdatesPanelOpenButton() {
return cy.getByTestId('version-update-next-versions-link');
}
export function getVersionUpdatesPanel() {
return cy.getByTestId('version-updates-panel');
}
export function getVersionUpdatesPanelCloseButton() {
return getVersionUpdatesPanel().get('.el-drawer__close-btn').first();
}
export function getVersionCard() {
return cy.getByTestId('version-card');
}
/**
* Actions
*/
export function openWhatsNewMenu() {
mainSidebar.getters.whatsNew().should('be.visible');
mainSidebar.getters.whatsNew().click();
}
export function openVersionUpdatesPanel() {
getVersionUpdatesPanelOpenButton().should('be.visible').click();
}
export function closeVersionUpdatesPanel() {
getVersionUpdatesPanelCloseButton().click();
}
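// Typical flow in a spec:
//   openWhatsNewMenu();
//   openVersionUpdatesPanel();
//   getVersionCard().should('have.length.at.least', 1);
//   closeVersionUpdatesPanel();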

View File

@@ -0,0 +1,81 @@
import { BACKEND_BASE_URL } from '../constants';
import { NDV, WorkflowPage } from '../pages';
import { getVisibleSelect } from '../utils';
export const waitForWebhook = 500;
export interface SimpleWebhookCallOptions {
method: string;
webhookPath: string;
responseCode?: number;
respondWith?: string;
executeNow?: boolean;
responseData?: string;
authentication?: string;
}
const workflowPage = new WorkflowPage();
const ndv = new NDV();
export const simpleWebhookCall = (options: SimpleWebhookCallOptions) => {
const {
authentication,
method,
webhookPath,
responseCode,
respondWith,
responseData,
executeNow = true,
} = options;
workflowPage.actions.addInitialNodeToCanvas('Webhook');
workflowPage.actions.openNode('Webhook');
cy.getByTestId('parameter-input-httpMethod').click();
getVisibleSelect().find('.option-headline').contains(method).click();
cy.getByTestId('parameter-input-path')
.find('.parameter-input')
.find('input')
.clear()
.type(webhookPath);
if (authentication) {
cy.getByTestId('parameter-input-authentication').click();
getVisibleSelect().find('.option-headline').contains(authentication).click();
}
if (responseCode) {
cy.get('.param-options').click();
getVisibleSelect().contains('Response Code').click();
cy.get('.parameter-item-wrapper > .parameter-input-list-wrapper').children().click();
getVisibleSelect().contains('201').click();
}
if (respondWith) {
cy.getByTestId('parameter-input-responseMode').click();
getVisibleSelect().find('.option-headline').contains(respondWith).click();
}
if (responseData) {
cy.getByTestId('parameter-input-responseData').click();
getVisibleSelect().find('.option-headline').contains(responseData).click();
}
const callEndpoint = (fn: (response: Cypress.Response<unknown>) => void) => {
cy.request(method, `${BACKEND_BASE_URL}/webhook-test/${webhookPath}`).then(fn);
};
if (executeNow) {
ndv.actions.execute();
cy.wait(waitForWebhook);
callEndpoint((response) => {
expect(response.status).to.eq(200);
ndv.getters.outputPanel().contains('headers');
});
}
return {
callEndpoint,
};
};
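// Usage sketch (path and options are illustrative):
//   const { callEndpoint } = simpleWebhookCall({
//     method: 'GET',
//     webhookPath: 'my-test-path',
//     executeNow: false,
//   });
//   // ...start a test execution separately, then:
//   callEndpoint((response) => expect(response.status).to.eq(200));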

Some files were not shown because too many files have changed in this diff.