fix: resolve TypeScript configuration errors and update project documentation

Details:
- Fixed the TypeScript configuration error in the @n8n/config package
- Removed the nonexistent jest-expect-message type reference
- Cleared all TypeScript build caches
- Updated the feasibility analysis document and added the technical implementation plan
- Updated the Agent prompt document
- Added the exhibition-planning workflow document
- Included the n8n-chinese-translation subproject
- Added the exhibition-demo showcase system framework
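The message says the TypeScript build caches were cleared; the diff does not record the exact commands, but in this pnpm monorepo that typically means removing the *.tsbuildinfo files and rebuilding. A sketch under that assumption:

```bash
# Remove TypeScript incremental-build metadata outside node_modules
find . -name '*.tsbuildinfo' -not -path '*/node_modules/*' -delete

# Rebuild, logging to a file as the repo guidelines recommend
pnpm build > build.log 2>&1
```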
@@ -56,7 +56,10 @@
 "Bash(brew upgrade:*)",
 "Bash(npm install:*)",
 "Bash(git add:*)",
-"Bash(export N8N_DEFAULT_LOCALE=zh-CN)"
+"Bash(export N8N_DEFAULT_LOCALE=zh-CN)",
+"Bash(git checkout:*)",
+"Bash(git stash:*)",
+"Bash(git commit:*)"
 ],
 "defaultMode": "acceptEdits",
 "additionalDirectories": [
388 .cursor/rules/design.mdc Normal file
@@ -0,0 +1,388 @@
|
||||
---
|
||||
description: Use this rule when asked to do any frontend or UI design
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
When asked to design UI & frontend interface
|
||||
When asked to design UI & frontend interface
|
||||
# Role
|
||||
You are superdesign, a senior frontend designer integrated into VS Code as part of the Super Design extension.
|
||||
Your goal is to help user generate amazing design using code
|
||||
|
||||
# Instructions
|
||||
- Use the available tools when needed to help with file operations and code analysis
|
||||
- When creating design file:
|
||||
- Build one single HTML page of just one screen for the design, based on the user's feedback/task
|
||||
- You ALWAYS output design files in '.superdesign/design_iterations' folder as {design_name}_{n}.html (Where n needs to be unique like table_1.html, table_2.html, etc.) or svg file
|
||||
- If you are iterating design based on existing file, then the naming convention should be {current_file_name}_{n}.html, e.g. if we are iterating ui_1.html, then each version should be ui_1_1.html, ui_1_2.html, etc.
|
||||
- You should ALWAYS use tools above for write/edit html files, don't just output in a message, always do tool calls
|
||||
|
||||
## Styling
|
||||
1. superdesign tries to use the flowbite library as a base unless the user specifies otherwise.
|
||||
2. superdesign avoids using indigo or blue colors unless specified in the user's request.
|
||||
3. superdesign MUST generate responsive designs.
|
||||
4. When designing a component, poster, or any other design that is not a full app, make sure the background fits well with the actual poster or component UI color; e.g. if the component is light then the background should be dark, and vice versa.
|
||||
5. Fonts should always use Google Fonts; below is a list of default fonts: 'JetBrains Mono', 'Fira Code', 'Source Code Pro','IBM Plex Mono','Roboto Mono','Space Mono','Geist Mono','Inter','Roboto','Open Sans','Poppins','Montserrat','Outfit','Plus Jakarta Sans','DM Sans','Geist','Oxanium','Architects Daughter','Merriweather','Playfair Display','Lora','Source Serif Pro','Libre Baskerville','Space Grotesk'
|
||||
6. When creating CSS, make sure you include !important for all properties that might be overwritten by tailwind & flowbite, e.g. h1, body, etc.
|
||||
7. Unless the user asks specifically, you should NEVER use Bootstrap-style blue colors; those are terrible color choices. Instead, look at the reference themes below.
|
||||
8. Example theme patterns:
|
||||
Neo-brutalism style that feels like 90s web design
|
||||
<neo-brutalism-style>
|
||||
:root {
|
||||
--background: oklch(1.0000 0 0);
|
||||
--foreground: oklch(0 0 0);
|
||||
--card: oklch(1.0000 0 0);
|
||||
--card-foreground: oklch(0 0 0);
|
||||
--popover: oklch(1.0000 0 0);
|
||||
--popover-foreground: oklch(0 0 0);
|
||||
--primary: oklch(0.6489 0.2370 26.9728);
|
||||
--primary-foreground: oklch(1.0000 0 0);
|
||||
--secondary: oklch(0.9680 0.2110 109.7692);
|
||||
--secondary-foreground: oklch(0 0 0);
|
||||
--muted: oklch(0.9551 0 0);
|
||||
--muted-foreground: oklch(0.3211 0 0);
|
||||
--accent: oklch(0.5635 0.2408 260.8178);
|
||||
--accent-foreground: oklch(1.0000 0 0);
|
||||
--destructive: oklch(0 0 0);
|
||||
--destructive-foreground: oklch(1.0000 0 0);
|
||||
--border: oklch(0 0 0);
|
||||
--input: oklch(0 0 0);
|
||||
--ring: oklch(0.6489 0.2370 26.9728);
|
||||
--chart-1: oklch(0.6489 0.2370 26.9728);
|
||||
--chart-2: oklch(0.9680 0.2110 109.7692);
|
||||
--chart-3: oklch(0.5635 0.2408 260.8178);
|
||||
--chart-4: oklch(0.7323 0.2492 142.4953);
|
||||
--chart-5: oklch(0.5931 0.2726 328.3634);
|
||||
--sidebar: oklch(0.9551 0 0);
|
||||
--sidebar-foreground: oklch(0 0 0);
|
||||
--sidebar-primary: oklch(0.6489 0.2370 26.9728);
|
||||
--sidebar-primary-foreground: oklch(1.0000 0 0);
|
||||
--sidebar-accent: oklch(0.5635 0.2408 260.8178);
|
||||
--sidebar-accent-foreground: oklch(1.0000 0 0);
|
||||
--sidebar-border: oklch(0 0 0);
|
||||
--sidebar-ring: oklch(0.6489 0.2370 26.9728);
|
||||
--font-sans: DM Sans, sans-serif;
|
||||
--font-serif: ui-serif, Georgia, Cambria, "Times New Roman", Times, serif;
|
||||
--font-mono: Space Mono, monospace;
|
||||
--radius: 0px;
|
||||
--shadow-2xs: 4px 4px 0px 0px hsl(0 0% 0% / 0.50);
|
||||
--shadow-xs: 4px 4px 0px 0px hsl(0 0% 0% / 0.50);
|
||||
--shadow-sm: 4px 4px 0px 0px hsl(0 0% 0% / 1.00), 4px 1px 2px -1px hsl(0 0% 0% / 1.00);
|
||||
--shadow: 4px 4px 0px 0px hsl(0 0% 0% / 1.00), 4px 1px 2px -1px hsl(0 0% 0% / 1.00);
|
||||
--shadow-md: 4px 4px 0px 0px hsl(0 0% 0% / 1.00), 4px 2px 4px -1px hsl(0 0% 0% / 1.00);
|
||||
--shadow-lg: 4px 4px 0px 0px hsl(0 0% 0% / 1.00), 4px 4px 6px -1px hsl(0 0% 0% / 1.00);
|
||||
--shadow-xl: 4px 4px 0px 0px hsl(0 0% 0% / 1.00), 4px 8px 10px -1px hsl(0 0% 0% / 1.00);
|
||||
--shadow-2xl: 4px 4px 0px 0px hsl(0 0% 0% / 2.50);
|
||||
--tracking-normal: 0em;
|
||||
--spacing: 0.25rem;
|
||||
|
||||
--radius-sm: calc(var(--radius) - 4px);
|
||||
--radius-md: calc(var(--radius) - 2px);
|
||||
--radius-lg: var(--radius);
|
||||
--radius-xl: calc(var(--radius) + 4px);
|
||||
}
|
||||
</neo-brutalism-style>
|
||||
|
||||
Modern dark mode style like vercel, linear
|
||||
<modern-dark-mode-style>
|
||||
:root {
|
||||
--background: oklch(1 0 0);
|
||||
--foreground: oklch(0.1450 0 0);
|
||||
--card: oklch(1 0 0);
|
||||
--card-foreground: oklch(0.1450 0 0);
|
||||
--popover: oklch(1 0 0);
|
||||
--popover-foreground: oklch(0.1450 0 0);
|
||||
--primary: oklch(0.2050 0 0);
|
||||
--primary-foreground: oklch(0.9850 0 0);
|
||||
--secondary: oklch(0.9700 0 0);
|
||||
--secondary-foreground: oklch(0.2050 0 0);
|
||||
--muted: oklch(0.9700 0 0);
|
||||
--muted-foreground: oklch(0.5560 0 0);
|
||||
--accent: oklch(0.9700 0 0);
|
||||
--accent-foreground: oklch(0.2050 0 0);
|
||||
--destructive: oklch(0.5770 0.2450 27.3250);
|
||||
--destructive-foreground: oklch(1 0 0);
|
||||
--border: oklch(0.9220 0 0);
|
||||
--input: oklch(0.9220 0 0);
|
||||
--ring: oklch(0.7080 0 0);
|
||||
--chart-1: oklch(0.8100 0.1000 252);
|
||||
--chart-2: oklch(0.6200 0.1900 260);
|
||||
--chart-3: oklch(0.5500 0.2200 263);
|
||||
--chart-4: oklch(0.4900 0.2200 264);
|
||||
--chart-5: oklch(0.4200 0.1800 266);
|
||||
--sidebar: oklch(0.9850 0 0);
|
||||
--sidebar-foreground: oklch(0.1450 0 0);
|
||||
--sidebar-primary: oklch(0.2050 0 0);
|
||||
--sidebar-primary-foreground: oklch(0.9850 0 0);
|
||||
--sidebar-accent: oklch(0.9700 0 0);
|
||||
--sidebar-accent-foreground: oklch(0.2050 0 0);
|
||||
--sidebar-border: oklch(0.9220 0 0);
|
||||
--sidebar-ring: oklch(0.7080 0 0);
|
||||
--font-sans: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, 'Noto Sans', sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
|
||||
--font-serif: ui-serif, Georgia, Cambria, "Times New Roman", Times, serif;
|
||||
--font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
--radius: 0.625rem;
|
||||
--shadow-2xs: 0 1px 3px 0px hsl(0 0% 0% / 0.05);
|
||||
--shadow-xs: 0 1px 3px 0px hsl(0 0% 0% / 0.05);
|
||||
--shadow-sm: 0 1px 3px 0px hsl(0 0% 0% / 0.10), 0 1px 2px -1px hsl(0 0% 0% / 0.10);
|
||||
--shadow: 0 1px 3px 0px hsl(0 0% 0% / 0.10), 0 1px 2px -1px hsl(0 0% 0% / 0.10);
|
||||
--shadow-md: 0 1px 3px 0px hsl(0 0% 0% / 0.10), 0 2px 4px -1px hsl(0 0% 0% / 0.10);
|
||||
--shadow-lg: 0 1px 3px 0px hsl(0 0% 0% / 0.10), 0 4px 6px -1px hsl(0 0% 0% / 0.10);
|
||||
--shadow-xl: 0 1px 3px 0px hsl(0 0% 0% / 0.10), 0 8px 10px -1px hsl(0 0% 0% / 0.10);
|
||||
--shadow-2xl: 0 1px 3px 0px hsl(0 0% 0% / 0.25);
|
||||
--tracking-normal: 0em;
|
||||
--spacing: 0.25rem;
|
||||
|
||||
--radius-sm: calc(var(--radius) - 4px);
|
||||
--radius-md: calc(var(--radius) - 2px);
|
||||
--radius-lg: var(--radius);
|
||||
--radius-xl: calc(var(--radius) + 4px);
|
||||
}
|
||||
</modern-dark-mode-style>
|
||||
|
||||
## Images & icons
|
||||
1. For images, just use placeholder images from public sources like Unsplash or placehold.co, or others where you already know the exact image URL; don't make up URLs
|
||||
2. For icons, use Lucide icons or other public icon sets; import like <script src="https://unpkg.com/lucide@latest/dist/umd/lucide.min.js"></script>
|
||||
|
||||
## Script
|
||||
1. When importing tailwind css, just use <script src="https://cdn.tailwindcss.com"></script>, don't load CSS directly as a stylesheet resource like <link href="https://cdn.jsdelivr.net/npm/tailwindcss@2.2.19/dist/tailwind.min.css" rel="stylesheet">
|
||||
2. When using flowbite, import like <script src="https://cdn.jsdelivr.net/npm/flowbite@2.0.0/dist/flowbite.min.js"></script>
|
||||
|
||||
## Workflow
|
||||
You should always follow the workflow below unless the user explicitly asks you to do something else:
|
||||
1. Layout design
|
||||
2. Theme design (color, font, spacing, shadow), using the generateTheme tool; it should save the CSS to a local file
|
||||
3. Core Animation design
|
||||
4. Generate a single html file for the UI
|
||||
5. You HAVE TO confirm with the user step by step: don't do theme design until the user signs off on the layout design, and the same goes for all following steps
|
||||
|
||||
### 1. Layout design
|
||||
Output type: Just text
|
||||
Think through how the layout of the interface should look and what the different UI components are
|
||||
Then present the layout in ASCII wireframe format following the guidelines of a good ASCII wireframe; you can do ASCII art too for a more custom layout or graphic design
|
||||
|
||||
### 2. Theme design
|
||||
Output type: Tool call
|
||||
Think through what are the colors, fonts, spacing, etc.
|
||||
You HAVE TO use generateTheme tool to generate the theme, do NOT just output XML type text for tool-call, that is not allowed
|
||||
|
||||
### 3. Animation design
|
||||
Output type: Just text
|
||||
Think through what are the animations, transitions, etc.
|
||||
|
||||
### 4. Generate html file for each UI component and then combine them together to form a single html file
|
||||
Output type: Tool call
|
||||
Generate html file for each UI component and then combine them together to form a single html file
|
||||
Make sure to reference the theme CSS file you created in step 2, and add custom styles that don't exist yet in the HTML file
|
||||
You HAVE TO use write tool to generate the html file, do NOT just output XML type text for tool-call, that is not allowed
|
||||
|
||||
<example>
|
||||
<user>design an AI chat UI</user>
|
||||
|
||||
<assistant>
|
||||
Let's think through the layout design for an AI chat UI. Here are the key components and layout considerations:
|
||||
|
||||
## Core UI Components
|
||||
|
||||
**Header Area**
|
||||
- Settings/menu button (Top left)
|
||||
- Chat title/AI name (Top left)
|
||||
- Conversation controls (new chat, clear, etc.) (Top right)
|
||||
|
||||
**Main Chat Area**
|
||||
- Message container (scrollable) (Full width & height)
|
||||
- User messages (typically right-aligned)
|
||||
- AI messages (typically left-aligned)
|
||||
- Message timestamps (Small subtle text at the bottom of each message, aligned to the right/left depending on the message)
|
||||
|
||||
**Input Area**
|
||||
- Text input field (Full width left)
|
||||
- Send button (Embedded within the input field, bottom right side of the screen)
|
||||
- Additional controls (attach files, voice input, etc.) (Embed within the input field, Bottom left)
|
||||
- Character/token counter (optional) (Embed within the input field, top right corner, small text)
|
||||
|
||||
|
||||
## Layout Structure Options
|
||||
|
||||
┌─────────────────────────────────────┐
|
||||
│ ☰ HEADER BAR + │
|
||||
├─────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────────────────────┐ │
|
||||
│ │ AI Message Bubble │ │
|
||||
│ └─────────────────────────────┘ │
|
||||
│ │
|
||||
│ ┌─────────────────────────────┐ │
|
||||
│ │ User Message Bubble │ │
|
||||
│ └─────────────────────────────┘ │
|
||||
│ │
|
||||
│ ┌─────────────────────────────┐ │
|
||||
│ │ AI Message Bubble │ │
|
||||
│ └─────────────────────────────┘ │
|
||||
│ │
|
||||
│ [CHAT AREA] │
|
||||
│ │
|
||||
├─────────────────────────────────────┤
|
||||
│ [Text Input Field] [Send] │
|
||||
└─────────────────────────────────────┘
|
||||
|
||||
When hamburger (☰) is clicked, sidebar slides out:
|
||||
┌──────────────┬─────────────────────────────────────┐
|
||||
│ SIDEBAR │ ☰ HEADER BAR + │
|
||||
│ ┌──────────┐ ├─────────────────────────────────────┤
|
||||
│ │ Chat 1 │ │ │
|
||||
│ │ Chat 2 │ │ ┌─────────────────────────────┐ │
|
||||
│ │ Chat 3 │ │ │ AI Message Bubble │ │
|
||||
│ │ + New │ │ └─────────────────────────────┘ │
|
||||
│ └──────────┘ │ │
|
||||
│ │ ┌─────────────────────────────┐ │
|
||||
│ │ │ User Message Bubble │ │
|
||||
│ │ └─────────────────────────────┘ │
|
||||
│ │ │
|
||||
│ │ ┌─────────────────────────────┐ │
|
||||
│ │ │ AI Message Bubble │ │
|
||||
│ │ └─────────────────────────────┘ │
|
||||
│ │ │
|
||||
│ │ [CHAT AREA] │
|
||||
│ │ │
|
||||
│ ├─────────────────────────────────────┤
|
||||
│ │ [Text Input Field] [Send] │
|
||||
└──────────────┘─────────────────────────────────────┘
|
||||
|
||||
Would you like to go ahead with this layout & UI interaction, or does it need any modification?
|
||||
</assistant>
|
||||
|
||||
<user>
|
||||
They looks good
|
||||
</user>
|
||||
|
||||
<assistant>
|
||||
Great, next let's design the color & font theme,
|
||||
|
||||
generateTheme(theme_name='Vercel techy style', reasoning_reference='Reference classic shadcn style that has ...', cssFilePath='.superdesign/design_iterations/theme_1.css', cssSheet=':root {
|
||||
--background: oklch(1.0000 0 0);
|
||||
--foreground: oklch(0.1448 0 0);
|
||||
--card: oklch(1.0000 0 0);
|
||||
--card-foreground: oklch(0.1448 0 0);
|
||||
--popover: oklch(1.0000 0 0);
|
||||
--popover-foreground: oklch(0.1448 0 0);
|
||||
--primary: oklch(0.5555 0 0);
|
||||
--primary-foreground: oklch(0.9851 0 0);
|
||||
--secondary: oklch(0.9702 0 0);
|
||||
--secondary-foreground: oklch(0.2046 0 0);
|
||||
--muted: oklch(0.9702 0 0);
|
||||
--muted-foreground: oklch(0.5486 0 0);
|
||||
--accent: oklch(0.9702 0 0);
|
||||
--accent-foreground: oklch(0.2046 0 0);
|
||||
--destructive: oklch(0.5830 0.2387 28.4765);
|
||||
--destructive-foreground: oklch(0.9702 0 0);
|
||||
--border: oklch(0.9219 0 0);
|
||||
--input: oklch(0.9219 0 0);
|
||||
--ring: oklch(0.7090 0 0);
|
||||
--chart-1: oklch(0.5555 0 0);
|
||||
--chart-2: oklch(0.5555 0 0);
|
||||
--chart-3: oklch(0.5555 0 0);
|
||||
--chart-4: oklch(0.5555 0 0);
|
||||
--chart-5: oklch(0.5555 0 0);
|
||||
--sidebar: oklch(0.9851 0 0);
|
||||
--sidebar-foreground: oklch(0.1448 0 0);
|
||||
--sidebar-primary: oklch(0.2046 0 0);
|
||||
--sidebar-primary-foreground: oklch(0.9851 0 0);
|
||||
--sidebar-accent: oklch(0.9702 0 0);
|
||||
--sidebar-accent-foreground: oklch(0.2046 0 0);
|
||||
--sidebar-border: oklch(0.9219 0 0);
|
||||
--sidebar-ring: oklch(0.7090 0 0);
|
||||
--font-sans: Geist Mono, monospace;
|
||||
--font-serif: Geist Mono, monospace;
|
||||
--font-mono: Geist Mono, monospace;
|
||||
--radius: 0rem;
|
||||
--shadow-2xs: 0px 1px 0px 0px hsl(0 0% 0% / 0.00);
|
||||
--shadow-xs: 0px 1px 0px 0px hsl(0 0% 0% / 0.00);
|
||||
--shadow-sm: 0px 1px 0px 0px hsl(0 0% 0% / 0.00), 0px 1px 2px -1px hsl(0 0% 0% / 0.00);
|
||||
--shadow: 0px 1px 0px 0px hsl(0 0% 0% / 0.00), 0px 1px 2px -1px hsl(0 0% 0% / 0.00);
|
||||
--shadow-md: 0px 1px 0px 0px hsl(0 0% 0% / 0.00), 0px 2px 4px -1px hsl(0 0% 0% / 0.00);
|
||||
--shadow-lg: 0px 1px 0px 0px hsl(0 0% 0% / 0.00), 0px 4px 6px -1px hsl(0 0% 0% / 0.00);
|
||||
--shadow-xl: 0px 1px 0px 0px hsl(0 0% 0% / 0.00), 0px 8px 10px -1px hsl(0 0% 0% / 0.00);
|
||||
--shadow-2xl: 0px 1px 0px 0px hsl(0 0% 0% / 0.00);
|
||||
--tracking-normal: 0em;
|
||||
--spacing: 0.25rem;
|
||||
}')
|
||||
|
||||
|
||||
<user>
|
||||
I like the vintage style
|
||||
</user>
|
||||
|
||||
<assistant>
|
||||
Great, next let's design the animation:
|
||||
|
||||
# CHAT UI ANIMATIONS - MICRO-SYNTAX
|
||||
|
||||
## Core Message Flow
|
||||
userMsg: 400ms ease-out [Y+20→0, X+10→0, S0.9→1]
|
||||
aiMsg: 600ms bounce [Y+15→0, S0.95→1] +200ms
|
||||
typing: 1400ms ∞ [Y±8, α0.4→1] stagger+200ms
|
||||
status: 300ms ease-out [α0.6→1, S1→1.05→1]
|
||||
|
||||
## Interface Transitions
|
||||
sidebar: 350ms ease-out [X-280→0, α0→1]
|
||||
overlay: 300ms [α0→1, blur0→4px]
|
||||
input: 200ms [S1→1.01, shadow+ring] focus
|
||||
input: 150ms [S1.01→1, shadow-ring] blur
|
||||
|
||||
## Button Interactions
|
||||
sendBtn: 150ms [S1→0.95→1, R±2°] press
|
||||
sendBtn: 200ms [S1→1.05, shadow↗] hover
|
||||
ripple: 400ms [S0→2, α1→0]
|
||||
|
||||
## Loading States
|
||||
chatLoad: 500ms ease-out [Y+40→0, α0→1]
|
||||
skeleton: 2000ms ∞ [bg: muted↔accent]
|
||||
spinner: 1000ms ∞ linear [R360°]
|
||||
|
||||
## Micro Interactions
|
||||
msgHover: 200ms [Y0→-2, shadow↗]
|
||||
msgSelect: 200ms [bg→accent, S1→1.02]
|
||||
error: 400ms [X±5] shake
|
||||
success: 600ms bounce [S0→1.2→1, R360°]
|
||||
|
||||
## Scroll & Navigation
|
||||
autoScroll: 400ms smooth
|
||||
scrollHint: 800ms ∞×3 [Y±5]
|
||||
|
||||
This animation system creates a cohesive, responsive feel throughout the chat interface. Each animation has specific timing and easing to feel natural and purposeful. Would you like me to adjust any of these animations or add additional ones?
|
||||
|
||||
</assistant>
|
||||
|
||||
<user>
|
||||
This looks great, lets do it
|
||||
</user>
|
||||
|
||||
<assistant>
|
||||
Great, I will generate the html file for each UI component and then combine them together to form a single html file
|
||||
|
||||
write(file_path='.superdesign/design_iterations/chat_ui.css', content='...')
|
||||
write(file_path='.superdesign/design_iterations/chat_ui.html', content='...')
|
||||
|
||||
I've created the html design, please review and let me know if you need any changes
|
||||
|
||||
</example>
|
||||
|
||||
IMPORTANT RULES:
|
||||
1. You MUST use the tool calls below for any action like generateTheme, write, edit, etc. You are NOT allowed to just output text like 'Called tool: write with arguments: ...' or <tool-call>...</tool-call>; MUST USE TOOL CALL (This is very important!!)
|
||||
2. You MUST confirm the layout, and then theme style, and then animation
|
||||
3. You MUST use .superdesign/design_iterations folder to save the design files, do NOT save to other folders
|
||||
4. You MUST follow the workflow above
|
||||
|
||||
# Available Tools
|
||||
- **read**: Read file contents within the workspace (supports text files, images, with line range options)
|
||||
- **write**: Write content to files in the workspace (creates parent directories automatically)
|
||||
- **edit**: Replace text within files using exact string matching (requires precise text matching including whitespace and indentation)
|
||||
- **multiedit**: Perform multiple find-and-replace operations on a single file in sequence (each edit applied to result of previous edit)
|
||||
- **glob**: Find files and directories matching glob patterns (e.g., "*.js", "src/**/*.ts") - efficient for locating files by name or path structure
|
||||
- **grep**: Search for text patterns within file contents using regular expressions (can filter by file types and paths)
|
||||
- **ls**: List directory contents with optional filtering, sorting, and detailed information (shows files and subdirectories)
|
||||
- **bash**: Execute shell/bash commands within the workspace (secure execution with timeouts and output capture)
|
||||
- **generateTheme**: Generate a theme for the design
|
||||
|
||||
When calling tools, you MUST use the actual tool call, do NOT just output text like 'Called tool: write with arguments: ...' or <tool-call>...</tool-call>, this won't actually call the tool. (This is very important to my life, please follow)
|
||||
90 .serena/memories/n8n_chinese_version_history.md Normal file
@@ -0,0 +1,90 @@
# n8n Chinese Version Git History

## Current latest version: b7062e1

### Version change history

**b7062e1** (2025-09-07) - **Add launch documentation and quick-start script**
- ✅ Added a complete LAUNCH.md launch guide
  - Covers quick start, manual start, and development-mode instructions
  - Detailed introduction to the Chinese translation features
  - Environment variable configuration
  - Troubleshooting guide and project structure overview
- ✅ Added a smart start.sh launch script
  - Supports normal start and development mode
  - Automatically checks system dependencies and port usage
  - Handles existing processes intelligently
  - Colored log output and progress display
  - Supports force-build and check-only modes
- 📁 New files: LAUNCH.md, start.sh (executable)

**1f46404** (2025-09-07) - **Integrate the n8n Chinese translation**
- ✅ Fully integrated the Chinese translation from the n8n-i18n-chinese project
- ✅ Modified the i18n configuration to support Chinese
  - packages/frontend/@n8n/i18n/src/index.ts
  - Set the default language to zh-CN
  - Imported the 3465-line Chinese translation file
- ✅ Applied a patch to fix null data issues during translation
  - packages/frontend/editor-ui/src/components/CredentialEdit/CredentialConfig.vue
- ✅ Created the .env environment configuration file
  - N8N_DEFAULT_LOCALE=zh-CN
  - N8N_SECURE_COOKIE=false
- 📁 New files: packages/frontend/@n8n/i18n/src/locales/zh-CN.json, .env
- 🔧 Modified files: packages/frontend/@n8n/i18n/src/index.ts, CredentialConfig.vue

**f00c8ee** (2025-09-07) - **Configure and run the n8n local development environment**
- ✅ Initialized the n8n development environment
- ✅ Configured the pnpm workspace
- 📁 Basic project structure established

**e41f20e** (2025-09-07) - **Add Git auto-commit conventions**
- ✅ Established Git commit conventions
- 📁 Added the CLAUDE.md document

**f64f498** (2025-09-07) - **Initial commit**
- ✅ Project initialization
- ✅ Cloned the n8n-1.109.2 source code

## Feature status

### ✅ Completed features
- Fully translated Chinese interface (3465 translated lines)
- Smart launch script
- Complete launch documentation
- Environment variable configuration
- Bug-fix patches
- Git version management

### 🎯 Current version highlights
- n8n version: 1.109.2
- Fully localized Chinese interface
- One-command start: `./start.sh`
- Development mode: `./start.sh -d`
- Access URL: http://localhost:5678
- Server shows: `Locale: zh-CN`

### 🔧 Technical configuration
- Node.js: v22.18.0
- pnpm: 10.12.1
- Default language: zh-CN
- Database: SQLite
- Port: 5678

## Why backups matter

Every version contains important configuration changes, so it is recommended to:
1. Regularly back up the current branch: `My_N8N`
2. Record the files changed and the reason for each change
3. Keep build logs and error records
4. Test startup and functionality for every version

## Rollback guide

To roll back to a specific version:
```bash
git checkout <version-hash>
# For example, roll back to the Chinese translation version
git checkout 1f46404
```

Last updated: 2025-09-07 23:16
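A minimal sketch that combines the backup advice with the rollback guide above (the backup branch name is illustrative):

```bash
# Keep a safety copy of the current branch before rolling back
git branch backup/My_N8N-20250907 My_N8N

# Roll back the working tree to the Chinese-translation version
git checkout 1f46404
```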
383 .windsurfrules Normal file
@@ -0,0 +1,383 @@
1 build.log Normal file
@@ -0,0 +1 @@
|
||||
ERR_PNPM_NO_IMPORTER_MANIFEST_FOUND No package.json (or package.yaml, or package.json5) was found in "/Users/xiaoqi/Documents/Dev/Project/2025-09-08_n8nDEMO演示".
|
||||
1 n8n-chinese-translation Submodule
Submodule n8n-chinese-translation added at 174890c658
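Because the commit pins n8n-chinese-translation as a submodule at 174890c658, a fresh clone needs it fetched explicitly; a minimal sketch:

```bash
# After cloning, fetch the submodule content recorded by this commit
git submodule update --init n8n-chinese-translation

# Confirm it sits at the recorded revision (174890c658)
git submodule status n8n-chinese-translation
```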
5 n8n-n8n-1.109.2/.actrc 2 Executable file
@@ -0,0 +1,5 @@
|
||||
-P blacksmith-2vcpu-ubuntu-2204=ubuntu-latest
|
||||
-P blacksmith-4vcpu-ubuntu-2204=ubuntu-latest
|
||||
-P ubuntu-22.04=ubuntu-latest
|
||||
-P ubuntu-20.04=ubuntu-latest
|
||||
--container-architecture linux/amd64
|
||||
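The flags above are for nektos/act, which runs GitHub Actions locally: each -P maps a runner label used in the workflows to a Docker image, and act reads such flags from an .actrc file. A typical local run, assuming act is installed, might look like:

```bash
# List the workflow jobs act can see, using the platform mappings above
act -l

# Run the jobs triggered by a pull_request event
act pull_request
```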
30 n8n-n8n-1.109.2/.bundlemonrc 2.json Executable file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"baseDir": "packages/frontend/editor-ui/dist",
|
||||
"defaultCompression": "gzip",
|
||||
"reportOutput": [
|
||||
[
|
||||
"github",
|
||||
{
|
||||
"checkRun": true,
|
||||
"commitStatus": "off",
|
||||
"prComment": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"files": [
|
||||
{
|
||||
"path": "*.wasm",
|
||||
"friendlyName": "WASM Dependencies"
|
||||
}
|
||||
],
|
||||
"groups": [
|
||||
{
|
||||
"groupName": "Editor UI - Total JS Size",
|
||||
"path": "**/*.js"
|
||||
},
|
||||
{
|
||||
"groupName": "Editor UI - Total CSS Size",
|
||||
"path": "**/*.css"
|
||||
}
|
||||
]
|
||||
}
|
||||
19 n8n-n8n-1.109.2/.dockerignore 2 Executable file
@@ -0,0 +1,19 @@
|
||||
**/*.md
|
||||
**/.env
|
||||
.cache
|
||||
assets
|
||||
node_modules
|
||||
packages/node-dev
|
||||
packages/**/node_modules
|
||||
packages/**/dist
|
||||
packages/**/.turbo
|
||||
packages/**/*.test.*
|
||||
.git
|
||||
.github
|
||||
!.github/scripts
|
||||
*.tsbuildinfo
|
||||
docker/compose
|
||||
docker/**/Dockerfile
|
||||
.vscode
|
||||
packages/testing
|
||||
cypress
|
||||
20 n8n-n8n-1.109.2/.editorconfig 2 Executable file
@@ -0,0 +1,20 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = tab
|
||||
indent_size = 2
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[package.json]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.yml]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.ts]
|
||||
quote_type = single
|
||||
18 n8n-n8n-1.109.2/.git-blame-ignore-revs 2 Executable file
@@ -0,0 +1,18 @@
|
||||
# Commits of large-scale changes to exclude from `git blame` results
|
||||
|
||||
# Set up linting and formatting (#2120)
|
||||
|
||||
56c4c6991fb21ba4b7bdcd22c929f63cc1d1defe
|
||||
|
||||
# refactor(editor): Apply Prettier (no-changelog) #4920
|
||||
|
||||
5ca2148c7ed06c90f999508928b7a51f9ac7a788
|
||||
|
||||
# refactor: Run lintfix (no-changelog) (#7537)
|
||||
|
||||
62c096710fab2f7e886518abdbded34b55e93f62
|
||||
|
||||
# refactor: Move test files alongside tested files (#11504)
|
||||
|
||||
7e58fc4fec468aca0b45d5bfe6150e1af632acbc
|
||||
f32b13c6ed078be042a735bc8621f27e00dc3116
|
||||
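The file above lists large-scale refactor commits that `git blame` should skip; git only does so when pointed at the file. A minimal sketch of the standard usage (not part of this commit):

```bash
# Tell git blame to skip the revisions listed above (one-time per clone)
git config blame.ignoreRevsFile .git-blame-ignore-revs

# Or pass the file explicitly for a single invocation
git blame --ignore-revs-file .git-blame-ignore-revs <path/to/file>
```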
1 n8n-n8n-1.109.2/.gitattributes 2 Executable file
@@ -0,0 +1 @@
|
||||
*.sh text eol=lf
|
||||
42 n8n-n8n-1.109.2/.gitignore 2 Executable file
@@ -0,0 +1,42 @@
|
||||
node_modules
|
||||
.DS_Store
|
||||
.tmp
|
||||
tmp
|
||||
dist
|
||||
coverage
|
||||
npm-debug.log*
|
||||
yarn.lock
|
||||
google-generated-credentials.json
|
||||
_START_PACKAGE
|
||||
.env
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
!.vscode/settings.default.json
|
||||
.idea
|
||||
nodelinter.config.json
|
||||
**/package-lock.json
|
||||
packages/**/.turbo
|
||||
.turbo
|
||||
*.tsbuildinfo
|
||||
.stylelintcache
|
||||
*.swp
|
||||
CHANGELOG-*.md
|
||||
*.mdx
|
||||
build-storybook.log
|
||||
*.junit.xml
|
||||
junit.xml
|
||||
test-results.json
|
||||
*.0x
|
||||
packages/testing/playwright/playwright-report
|
||||
packages/testing/playwright/test-results
|
||||
packages/testing/playwright/ms-playwright-cache
|
||||
test-results/
|
||||
compiled_app_output
|
||||
trivy_report*
|
||||
compiled
|
||||
packages/cli/src/modules/my-feature
|
||||
.secrets
|
||||
packages/testing/**/.cursor/rules/
|
||||
.venv
|
||||
.ruff_cache
|
||||
__pycache__
|
||||
28 n8n-n8n-1.109.2/.npmignore 2 Executable file
@@ -0,0 +1,28 @@
|
||||
dist/test
|
||||
dist/**/*.{js.map}
|
||||
|
||||
.DS_Store
|
||||
|
||||
# local env files
|
||||
.env.local
|
||||
.env.*.local
|
||||
|
||||
# Log files
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Editor directories and files
|
||||
.idea
|
||||
.vscode
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw*
|
||||
|
||||
.editorconfig
|
||||
eslint.config.js
|
||||
tsconfig.json
|
||||
|
||||
.turbo
|
||||
*.tsbuildinfo
|
||||
14 n8n-n8n-1.109.2/.npmrc 2 Executable file
@@ -0,0 +1,14 @@
|
||||
audit = false
|
||||
fund = false
|
||||
update-notifier = false
|
||||
auto-install-peers = true
|
||||
strict-peer-dependencies = false
|
||||
prefer-workspace-packages = true
|
||||
link-workspace-packages = deep
|
||||
hoist = true
|
||||
shamefully-hoist = true
|
||||
hoist-workspace-packages = false
|
||||
loglevel = warn
|
||||
package-manager-strict=false
|
||||
# https://github.com/pnpm/pnpm/issues/7024
|
||||
package-import-method=clone-or-copy
|
||||
25 n8n-n8n-1.109.2/.prettierignore 2 Executable file
@@ -0,0 +1,25 @@
|
||||
coverage
|
||||
dist
|
||||
package.json
|
||||
pnpm-lock.yaml
|
||||
packages/frontend/editor-ui/index.html
|
||||
packages/nodes-base/nodes/**/test
|
||||
packages/cli/templates/form-trigger.handlebars
|
||||
packages/cli/templates/form-trigger-completion.handlebars
|
||||
packages/cli/templates/form-trigger-409.handlebars
|
||||
packages/cli/templates/form-trigger-404.handlebars
|
||||
cypress/fixtures
|
||||
CHANGELOG.md
|
||||
.github/pull_request_template.md
|
||||
# Ignored for now
|
||||
**/*.md
|
||||
# Handled by biome
|
||||
**/*.ts
|
||||
**/*.js
|
||||
**/*.json
|
||||
**/*.jsonc
|
||||
|
||||
# Auto-generated
|
||||
**/components.d.ts
|
||||
|
||||
justfile
|
||||
51 n8n-n8n-1.109.2/.prettierrc 2.js Executable file
@@ -0,0 +1,51 @@
|
||||
module.exports = {
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#semicolons
|
||||
*/
|
||||
semi: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#trailing-commas
|
||||
*/
|
||||
trailingComma: 'all',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#bracket-spacing
|
||||
*/
|
||||
bracketSpacing: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#tabs
|
||||
*/
|
||||
useTabs: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#tab-width
|
||||
*/
|
||||
tabWidth: 2,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#arrow-function-parentheses
|
||||
*/
|
||||
arrowParens: 'always',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#quotes
|
||||
*/
|
||||
singleQuote: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#quote-props
|
||||
*/
|
||||
quoteProps: 'as-needed',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#end-of-line
|
||||
*/
|
||||
endOfLine: 'lf',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#print-width
|
||||
*/
|
||||
printWidth: 100,
|
||||
};
|
||||
7436 n8n-n8n-1.109.2/CHANGELOG 2.md Executable file
File diff suppressed because it is too large
159 n8n-n8n-1.109.2/CLAUDE 2.md Executable file
@@ -0,0 +1,159 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with
|
||||
code in the n8n repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
n8n is a workflow automation platform written in TypeScript, using a monorepo
|
||||
structure managed by pnpm workspaces. It consists of a Node.js backend, Vue.js
|
||||
frontend, and extensible node-based workflow engine.
|
||||
|
||||
## General Guidelines
|
||||
|
||||
- Always use pnpm
|
||||
- We use Linear as a ticket tracking system
|
||||
- We use Posthog for feature flags
|
||||
- When starting to work on a new ticket – create a new branch from fresh
|
||||
master with the name specified in Linear ticket
|
||||
- When creating a new branch for a ticket in Linear - use the branch name
|
||||
suggested by linear
|
||||
- Use mermaid diagrams in MD files when you need to visualise something
|
||||
|
||||
## Essential Commands
|
||||
|
||||
### Building
|
||||
Use `pnpm build` to build all packages. ALWAYS redirect the output of the
|
||||
build command to a file:
|
||||
|
||||
```bash
|
||||
pnpm build > build.log 2>&1
|
||||
```
|
||||
|
||||
You can inspect the last few lines of the build log file to check for errors:
|
||||
```bash
|
||||
tail -n 20 build.log
|
||||
```
|
||||
|
||||
### Testing
|
||||
- `pnpm test` - Run all tests
|
||||
- `pnpm test:affected` - Runs tests based on what has changed since the last
|
||||
commit
|
||||
- `pnpm dev:e2e` - E2E tests in development mode
|
||||
|
||||
Running a particular test file requires going to the directory of that test
|
||||
and running: `pnpm test <test-file>`.
|
||||
|
||||
When changing directories, use `pushd` to navigate into the directory and
|
||||
`popd` to return to the previous directory. When in doubt, use `pwd` to check
|
||||
your current directory.
|
||||
|
||||
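A minimal sketch of that flow, assuming a hypothetical test file inside packages/cli (path and file name are illustrative):

```bash
# Jump into the package, run one test file, then return
pushd packages/cli
pnpm test some-feature.test.ts   # illustrative file name
popd
pwd   # confirm you are back where you started
```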
### Code Quality
|
||||
- `pnpm lint` - Lint code
|
||||
- `pnpm typecheck` - Run type checks
|
||||
|
||||
Always run lint and typecheck before committing code to ensure quality.
|
||||
Execute these commands from within the specific package directory you're
|
||||
working on (e.g., `cd packages/cli && pnpm lint`). Run the full repository
|
||||
check only when preparing the final PR. When your changes affect type
|
||||
definitions, interfaces in `@n8n/api-types`, or cross-package dependencies,
|
||||
build the system before running lint and typecheck.
|
||||
|
||||
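Following the guidance above, a typical pre-commit pass from within the package being changed might look like this (package name is an example):

```bash
# Check only the package being changed
cd packages/cli
pnpm lint
pnpm typecheck

# If shared types in @n8n/api-types or cross-package deps changed, build first
cd ../..
pnpm build > build.log 2>&1
tail -n 20 build.log
```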
## Architecture Overview
|
||||
|
||||
**Monorepo Structure:** pnpm workspaces with Turbo build orchestration
|
||||
|
||||
### Package Structure
|
||||
|
||||
The monorepo is organized into these key packages:
|
||||
|
||||
- **`packages/@n8n/api-types`**: Shared TypeScript interfaces between frontend and backend
|
||||
- **`packages/workflow`**: Core workflow interfaces and types
|
||||
- **`packages/core`**: Workflow execution engine
|
||||
- **`packages/cli`**: Express server, REST API, and CLI commands
|
||||
- **`packages/editor-ui`**: Vue 3 frontend application
|
||||
- **`packages/@n8n/i18n`**: Internationalization for UI text
|
||||
- **`packages/nodes-base`**: Built-in nodes for integrations
|
||||
- **`packages/@n8n/nodes-langchain`**: AI/LangChain nodes
|
||||
- **`@n8n/design-system`**: Vue component library for UI consistency
|
||||
- **`@n8n/config`**: Centralized configuration management
|
||||
|
||||
## Technology Stack
|
||||
|
||||
- **Frontend:** Vue 3 + TypeScript + Vite + Pinia + Storybook UI Library
|
||||
- **Backend:** Node.js + TypeScript + Express + TypeORM
|
||||
- **Testing:** Jest (unit) + Playwright (E2E)
|
||||
- **Database:** TypeORM with SQLite/PostgreSQL/MySQL support
|
||||
- **Code Quality:** Biome (for formatting) + ESLint + lefthook git hooks
|
||||
|
||||
### Key Architectural Patterns
|
||||
|
||||
1. **Dependency Injection**: Uses `@n8n/di` for IoC container
|
||||
2. **Controller-Service-Repository**: Backend follows MVC-like pattern
|
||||
3. **Event-Driven**: Internal event bus for decoupled communication
|
||||
4. **Context-Based Execution**: Different contexts for different node types
|
||||
5. **State Management**: Frontend uses Pinia stores
|
||||
6. **Design System**: Reusable components and design tokens are centralized in
|
||||
`@n8n/design-system`, where all pure Vue components should be placed to
|
||||
ensure consistency and reusability
|
||||
|
||||
## Key Development Patterns
|
||||
|
||||
- Each package has isolated build configuration and can be developed independently
|
||||
- Hot reload works across the full stack during development
|
||||
- Node development uses dedicated `node-dev` CLI tool
|
||||
- Workflow tests are JSON-based for integration testing
|
||||
- AI features have dedicated development workflow (`pnpm dev:ai`)
|
||||
|
||||
### TypeScript Best Practices
|
||||
- **NEVER use `any` type** - use proper types or `unknown`
|
||||
- **Avoid type casting with `as`** - use type guards or type predicates instead
|
||||
- **Define shared interfaces in `@n8n/api-types`** package for FE/BE communication
|
||||
|
||||
### Error Handling
|
||||
- Don't use `ApplicationError` class in CLI and nodes for throwing errors,
|
||||
because it's deprecated. Use `UnexpectedError`, `OperationalError` or
|
||||
`UserError` instead.
|
||||
- Import from appropriate error classes in each package
|
||||
|
||||
### Frontend Development
|
||||
- **All UI text must use i18n** - add translations to `@n8n/i18n` package
|
||||
- **Use CSS variables directly** - never hardcode spacing as px values
|
||||
- **data-test-id must be a single value** (no spaces or multiple values)
|
||||
|
||||
When implementing CSS, refer to @packages/frontend/CLAUDE.md for guidelines on
|
||||
CSS variables and styling conventions.
|
||||
|
||||
### Testing Guidelines
|
||||
- **Always work from within the package directory** when running tests
|
||||
- **Mock all external dependencies** in unit tests
|
||||
- **Confirm test cases with user** before writing unit tests
|
||||
- **Typecheck is critical before committing** - always run `pnpm typecheck`
|
||||
- **When modifying pinia stores**, check for unused computed properties
|
||||
|
||||
What we use for testing and writing tests:
|
||||
- For testing nodes and other backend components, we use Jest for unit tests. Examples can be found in `packages/nodes-base/nodes/**/*test*`.
|
||||
- We use `nock` for server mocking (see the sketch after this list)
|
||||
- For frontend we use `vitest`
|
||||
- For e2e tests we use `Playwright` and `pnpm dev:e2e`. The old Cypress tests
|
||||
are being migrated to Playwright, so please use Playwright for new tests.
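As an illustration of the Jest + `nock` combination, here is a minimal sketch. The `fetchUser` helper and the example.com endpoint are invented for the example (real node tests exercise the node's `execute()` through the shared test harness instead), and it assumes `axios` is available as it is elsewhere in the monorepo.

```typescript
import axios from 'axios';
import nock from 'nock';

// Hypothetical helper under test.
async function fetchUser(baseUrl: string, id: string): Promise<{ name: string }> {
	const response = await axios.get<{ name: string }>(`${baseUrl}/users/${id}`);
	return response.data;
}

describe('fetchUser', () => {
	afterEach(() => nock.cleanAll());

	it('returns the mocked user', async () => {
		// nock intercepts the HTTP request, so no real server is contacted.
		nock('https://api.example.com').get('/users/42').reply(200, { name: 'Nathan' });

		const user = await fetchUser('https://api.example.com', '42');
		expect(user.name).toBe('Nathan');
	});
});
```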
|
||||
|
||||
### Common Development Tasks
|
||||
|
||||
When implementing features:
|
||||
1. Define API types in `packages/@n8n/api-types` (see the example after this list)
|
||||
2. Implement backend logic in `packages/cli` module, follow
|
||||
`@packages/cli/scripts/backend-module/backend-module.guide.md`
|
||||
3. Add API endpoints via controllers
|
||||
4. Update frontend in `packages/editor-ui` with i18n support
|
||||
5. Write tests with proper mocks
|
||||
6. Run `pnpm typecheck` to verify types
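For step 1, a shared type might look like the sketch below. The `UserSummary`/`ListUsersResponse` names and the file location are hypothetical; the point is that the backend controller and the editor-ui store import the same interface, so the two sides cannot drift apart silently.

```typescript
// packages/@n8n/api-types/src/users.dto.ts (hypothetical file), shared between FE and BE.
export interface UserSummary {
	id: string;
	email: string;
	isOwner: boolean;
}

export interface ListUsersResponse {
	users: UserSummary[];
	total: number;
}
```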
|
||||
|
||||
## Github Guidelines
|
||||
- When creating a PR, use the conventions in
|
||||
`.github/pull_request_template.md` and
|
||||
`.github/pull_request_title_conventions.md`.
|
||||
- Use `gh pr create --draft` to create draft PRs.
|
||||
- Always reference the Linear ticket in the PR description,
|
||||
use `https://linear.app/n8n/issue/[TICKET-ID]`
|
||||
- Always link to the GitHub issue if it is mentioned in the Linear ticket.
|
||||
76  n8n-n8n-1.109.2/CODE_OF_CONDUCT 2.md  Executable file
@@ -0,0 +1,76 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, sex characteristics, gender identity and expression,
|
||||
level of experience, education, socio-economic status, nationality, personal
|
||||
appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
- Using welcoming and inclusive language
|
||||
- Being respectful of differing viewpoints and experiences
|
||||
- Gracefully accepting constructive criticism
|
||||
- Focusing on what is best for the community
|
||||
- Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
- The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
- Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
- Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at jan@n8n.io. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see
|
||||
https://www.contributor-covenant.org/faq
|
||||
423  n8n-n8n-1.109.2/CONTRIBUTING 2.md  Executable file
@@ -0,0 +1,423 @@
|
||||
# Contributing to n8n
|
||||
|
||||
Great that you are here and you want to contribute to n8n!
|
||||
|
||||
## Contents
|
||||
|
||||
- [Contributing to n8n](#contributing-to-n8n)
|
||||
- [Contents](#contents)
|
||||
- [Code of conduct](#code-of-conduct)
|
||||
- [Directory structure](#directory-structure)
|
||||
- [Development setup](#development-setup)
|
||||
- [Dev Container](#dev-container)
|
||||
- [Requirements](#requirements)
|
||||
- [Node.js](#nodejs)
|
||||
- [pnpm](#pnpm)
|
||||
- [pnpm workspaces](#pnpm-workspaces)
|
||||
- [corepack](#corepack)
|
||||
- [Build tools](#build-tools)
|
||||
- [Actual n8n setup](#actual-n8n-setup)
|
||||
- [Start](#start)
|
||||
- [Development cycle](#development-cycle)
|
||||
- [Community PR Guidelines](#community-pr-guidelines)
|
||||
- [**1. Change Request/Comment**](#1-change-requestcomment)
|
||||
- [**2. General Requirements**](#2-general-requirements)
|
||||
- [**3. PR Specific Requirements**](#3-pr-specific-requirements)
|
||||
- [**4. Workflow Summary for Non-Compliant PRs**](#4-workflow-summary-for-non-compliant-prs)
|
||||
- [Test suite](#test-suite)
|
||||
- [Unit tests](#unit-tests)
|
||||
- [Code Coverage](#code-coverage)
|
||||
- [E2E tests](#e2e-tests)
|
||||
- [Releasing](#releasing)
|
||||
- [Create custom nodes](#create-custom-nodes)
|
||||
- [Extend documentation](#extend-documentation)
|
||||
- [Contribute workflow templates](#contribute-workflow-templates)
|
||||
- [Contributor License Agreement](#contributor-license-agreement)
|
||||
|
||||
## Code of conduct
|
||||
|
||||
This project and everyone participating in it are governed by the Code of
|
||||
Conduct which can be found in the file [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md).
|
||||
By participating, you are expected to uphold this code. Please report
|
||||
unacceptable behavior to jan@n8n.io.
|
||||
|
||||
## Directory structure
|
||||
|
||||
n8n is split up into different modules, which are all in a single mono repository.
|
||||
|
||||
The most important directories:
|
||||
|
||||
- [/docker/images](/docker/images) - Dockerfiles to create n8n containers
|
||||
- [/packages](/packages) - The different n8n modules
|
||||
- [/packages/cli](/packages/cli) - CLI code to run front- & backend
|
||||
- [/packages/core](/packages/core) - Core code which handles workflow
|
||||
execution, active webhooks and
|
||||
workflows. **Contact n8n before
|
||||
starting on any changes here**
|
||||
- [/packages/frontend/@n8n/design-system](/packages/design-system) - Vue frontend components
|
||||
- [/packages/frontend/editor-ui](/packages/editor-ui) - Vue frontend workflow editor
|
||||
- [/packages/node-dev](/packages/node-dev) - CLI to create new n8n-nodes
|
||||
- [/packages/nodes-base](/packages/nodes-base) - Base n8n nodes
|
||||
- [/packages/workflow](/packages/workflow) - Workflow code with interfaces which
|
||||
get used by front- & backend
|
||||
|
||||
## Development setup
|
||||
|
||||
If you want to change or extend n8n you have to make sure that all the needed
|
||||
dependencies are installed and the packages get linked correctly. Here's a short guide on how that can be done:
|
||||
|
||||
### Dev Container
|
||||
|
||||
If you already have VS Code and Docker installed, you can click [here](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/n8n-io/n8n) to get started. Clicking these links will cause VS Code to automatically install the Dev Containers extension if needed, clone the source code into a container volume, and spin up a dev container for use.
|
||||
|
||||
### Requirements
|
||||
|
||||
#### Node.js
|
||||
|
||||
[Node.js](https://nodejs.org/en/) version 22.16 or newer is required for development purposes.
|
||||
|
||||
#### pnpm
|
||||
|
||||
[pnpm](https://pnpm.io/) version 10.2 or newer is required for development purposes. We recommend installing it with [corepack](#corepack).
|
||||
|
||||
##### pnpm workspaces
|
||||
|
||||
n8n is split up into different modules which are all in a single mono repository.
|
||||
To facilitate the module management, [pnpm workspaces](https://pnpm.io/workspaces) are used.
|
||||
This automatically sets up file-links between modules which depend on each other.
|
||||
|
||||
#### corepack
|
||||
|
||||
We recommend enabling [Node.js corepack](https://nodejs.org/docs/latest-v16.x/api/corepack.html) with `corepack enable`.
|
||||
|
||||
You can install the correct version of pnpm using `corepack prepare --activate`.
|
||||
|
||||
**IMPORTANT**: If you have installed Node.js via homebrew, you'll need to run `brew install corepack`, since homebrew explicitly removes `npm` and `corepack` from [the `node` formula](https://github.com/Homebrew/homebrew-core/blob/master/Formula/node.rb#L66).
|
||||
|
||||
**IMPORTANT**: If you are on Windows, you'll need to run `corepack enable` and `corepack prepare --activate` in a terminal as an administrator.
|
||||
|
||||
#### Build tools
|
||||
|
||||
The packages which n8n uses depend on a few build tools:
|
||||
|
||||
Debian/Ubuntu:
|
||||
|
||||
```
|
||||
apt-get install -y build-essential python
|
||||
```
|
||||
|
||||
CentOS:
|
||||
|
||||
```
|
||||
yum install gcc gcc-c++ make
|
||||
```
|
||||
|
||||
Windows:
|
||||
|
||||
```
|
||||
npm add -g windows-build-tools
|
||||
```
|
||||
|
||||
MacOS:
|
||||
|
||||
No additional packages required.
|
||||
|
||||
#### actionlint (for GitHub Actions workflow development)
|
||||
|
||||
If you plan to modify GitHub Actions workflow files (`.github/workflows/*.yml`), you'll need [actionlint](https://github.com/rhysd/actionlint) for workflow validation:
|
||||
|
||||
**macOS (Homebrew):**
|
||||
```
|
||||
brew install actionlint
|
||||
```
|
||||
> **Note:** actionlint is only required if you're modifying workflow files. It runs automatically via git hooks when workflow files are changed.
|
||||
|
||||
### Actual n8n setup
|
||||
|
||||
> **IMPORTANT**: All the steps below have to get executed at least once to get the development setup up and running!
|
||||
|
||||
Now that everything n8n requires to run is installed, the actual n8n code can be
|
||||
checked out and set up:
|
||||
|
||||
1. [Fork](https://guides.github.com/activities/forking/#fork) the n8n repository.
|
||||
|
||||
2. Clone your forked repository:
|
||||
|
||||
```
|
||||
git clone https://github.com/<your_github_username>/n8n.git
|
||||
```
|
||||
|
||||
3. Go into repository folder:
|
||||
|
||||
```
|
||||
cd n8n
|
||||
```
|
||||
|
||||
4. Add the original n8n repository as `upstream` to your forked repository:
|
||||
|
||||
```
|
||||
git remote add upstream https://github.com/n8n-io/n8n.git
|
||||
```
|
||||
|
||||
5. Install all dependencies of all modules and link them together:
|
||||
|
||||
```
|
||||
pnpm install
|
||||
```
|
||||
|
||||
6. Build all the code:
|
||||
```
|
||||
pnpm build
|
||||
```
|
||||
|
||||
### Start
|
||||
|
||||
To start n8n execute:
|
||||
|
||||
```
|
||||
pnpm start
|
||||
```
|
||||
|
||||
To start n8n with tunnel:
|
||||
|
||||
```
|
||||
./packages/cli/bin/n8n start --tunnel
|
||||
```
|
||||
|
||||
## Development cycle
|
||||
|
||||
While iterating on n8n modules code, you can run `pnpm dev`. It will then
|
||||
automatically build your code, restart the backend and refresh the frontend
|
||||
(editor-ui) on every change you make.
|
||||
|
||||
### Basic Development Workflow
|
||||
|
||||
1. Start n8n in development mode:
|
||||
```
|
||||
pnpm dev
|
||||
```
|
||||
2. Hack, hack, hack
|
||||
3. Check if everything still runs in production mode:
|
||||
```
|
||||
pnpm build
|
||||
pnpm start
|
||||
```
|
||||
4. Create tests
|
||||
5. Run all [tests](#test-suite):
|
||||
```
|
||||
pnpm test
|
||||
```
|
||||
6. Commit code and [create a pull request](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
|
||||
|
||||
### Hot Reload for Nodes (N8N_DEV_RELOAD)
|
||||
|
||||
When developing custom nodes or credentials, you can enable hot reload to automatically detect changes without restarting the server:
|
||||
|
||||
```bash
|
||||
N8N_DEV_RELOAD=true pnpm dev
|
||||
```
|
||||
|
||||
**Performance considerations:**
|
||||
- File watching adds overhead to your system, especially on slower machines
|
||||
- The watcher monitors potentially thousands of files, which can impact CPU and memory usage
|
||||
- On resource-constrained systems, consider developing without hot reload and manually restarting when needed
|
||||
|
||||
### Selective Package Development
|
||||
|
||||
Running all packages in development mode can be resource-intensive. For better performance, run only the packages relevant to your work:
|
||||
|
||||
#### Available Filtered Commands
|
||||
|
||||
- **Backend-only development:**
|
||||
```bash
|
||||
pnpm dev:be
|
||||
```
|
||||
Excludes frontend packages like editor-ui and design-system
|
||||
|
||||
- **Frontend-only development:**
|
||||
```bash
|
||||
pnpm dev:fe
|
||||
```
|
||||
Runs the backend server and editor-ui development server
|
||||
|
||||
- **AI/LangChain nodes development:**
|
||||
```bash
|
||||
pnpm dev:ai
|
||||
```
|
||||
Runs only essential packages for AI node development
|
||||
|
||||
#### Custom Selective Development
|
||||
|
||||
For even more focused development, you can run packages individually:
|
||||
|
||||
**Example 1: Working on custom nodes**
|
||||
```bash
|
||||
# Terminal 1: Build and watch nodes package
|
||||
cd packages/nodes-base
|
||||
pnpm dev
|
||||
|
||||
# Terminal 2: Run the CLI with hot reload
|
||||
cd packages/cli
|
||||
N8N_DEV_RELOAD=true pnpm dev
|
||||
```
|
||||
|
||||
**Example 2: Pure frontend development**
|
||||
```bash
|
||||
# Terminal 1: Start the backend server (no watching)
|
||||
pnpm start
|
||||
|
||||
# Terminal 2: Run frontend dev server
|
||||
cd packages/editor-ui
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
**Example 3: Working on a specific node package**
|
||||
```bash
|
||||
# Terminal 1: Watch your node package
|
||||
cd packages/nodes-base # or your custom node package
|
||||
pnpm watch
|
||||
|
||||
# Terminal 2: Run CLI with hot reload
|
||||
cd packages/cli
|
||||
N8N_DEV_RELOAD=true pnpm dev
|
||||
```
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
The full development mode (`pnpm dev`) runs multiple processes in parallel:
|
||||
|
||||
1. **TypeScript compilation** for each package
|
||||
2. **File watchers** monitoring source files
|
||||
3. **Nodemon** restarting the backend on changes
|
||||
4. **Vite dev server** for the frontend with HMR
|
||||
5. **Multiple build processes** for various packages
|
||||
|
||||
**Performance impact:**
|
||||
- Can consume significant CPU and memory resources
|
||||
- File system watching creates overhead, especially on:
|
||||
- Networked file systems
|
||||
- Virtual machines with shared folders
|
||||
- Systems with slower I/O performance
|
||||
- The more packages you run in dev mode, the more system resources are consumed
|
||||
|
||||
**Recommendations for resource-constrained environments:**
|
||||
1. Use selective development commands based on your task
|
||||
2. Close unnecessary applications to free up resources
|
||||
3. Monitor system performance and adjust your development approach accordingly
|
||||
|
||||
---
|
||||
|
||||
### Community PR Guidelines
|
||||
|
||||
#### **1. Change Request/Comment**
|
||||
|
||||
Please address the requested changes or provide feedback within 14 days. If there is no response or updates to the pull request during this time, it will be automatically closed. The PR can be reopened once the requested changes are applied.
|
||||
|
||||
#### **2. General Requirements**
|
||||
|
||||
- **Follow the Style Guide:**
|
||||
- Ensure your code adheres to n8n's coding standards and conventions (e.g., formatting, naming, indentation). Use linting tools where applicable.
|
||||
- **TypeScript Compliance:**
|
||||
- Do not use `ts-ignore`.
|
||||
- Ensure code adheres to TypeScript rules.
|
||||
- **Avoid Repetitive Code:**
|
||||
- Reuse existing components, parameters, and logic wherever possible instead of redefining or duplicating them.
|
||||
- For nodes: Use the same parameter across multiple operations rather than defining a new parameter for each operation (if applicable).
|
||||
- **Testing Requirements:**
|
||||
- PRs **must include tests**:
|
||||
- Unit tests
|
||||
- Workflow tests for nodes (example [here](https://github.com/n8n-io/n8n/tree/master/packages/nodes-base/nodes/Switch/V3/test))
|
||||
- UI tests (if applicable)
|
||||
- **Typos:**
|
||||
- Use a spell-checking tool, such as [**Code Spell Checker**](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker), to avoid typos.
|
||||
|
||||
#### **3. PR Specific Requirements**
|
||||
|
||||
- **Small PRs Only:**
|
||||
- Focus on a single feature or fix per PR.
|
||||
- **Naming Convention:**
|
||||
- Follow [n8n's PR Title Conventions](https://github.com/n8n-io/n8n/blob/master/.github/pull_request_title_conventions.md#L36).
|
||||
- **New Nodes:**
|
||||
- PRs that introduce new nodes will be **auto-closed** unless they are explicitly requested by the n8n team and aligned with an agreed project scope. However, you can still explore [building your own nodes](https://docs.n8n.io/integrations/creating-nodes/), as n8n offers the flexibility to create your own custom nodes.
|
||||
- **Typo-Only PRs:**
|
||||
- Typos are not sufficient justification for a PR and will be rejected.
|
||||
|
||||
#### **4. Workflow Summary for Non-Compliant PRs**
|
||||
|
||||
- **No Tests:** If tests are not provided, the PR will be auto-closed after **14 days**.
|
||||
- **Non-Small PRs:** Large or multifaceted PRs will be returned for segmentation.
|
||||
- **New Nodes/Typo PRs:** Automatically rejected if not aligned with project scope or guidelines.
|
||||
|
||||
---
|
||||
|
||||
### Test suite
|
||||
|
||||
#### Unit tests
|
||||
|
||||
Unit tests can be started via:
|
||||
|
||||
```
|
||||
pnpm test
|
||||
```
|
||||
|
||||
If that gets executed in one of the package folders it will only run the tests
|
||||
of this package. If it gets executed in the n8n-root folder it will run all
|
||||
tests of all packages.
|
||||
|
||||
If you made a change which requires an update on a `.test.ts.snap` file, pass `-u` to the command to run tests or press `u` in watch mode.
|
||||
|
||||
#### Code Coverage
|
||||
We track coverage for all our code on [Codecov](https://app.codecov.io/gh/n8n-io/n8n).
|
||||
But when you are working on tests locally, we recommend running your tests with env variable `COVERAGE_ENABLED` set to `true`. You can then view the code coverage in the `coverage` folder, or you can use [this VSCode extension](https://marketplace.visualstudio.com/items?itemName=ryanluker.vscode-coverage-gutters) to visualize the coverage directly in VSCode.
|
||||
|
||||
#### E2E tests
|
||||
|
||||
⚠️ You have to run `pnpm cypress:install` to install cypress before running the tests for the first time and to update cypress.
|
||||
|
||||
E2E tests can be started via one of the following commands:
|
||||
|
||||
- `pnpm test:e2e:ui`: Start n8n and run e2e tests interactively using built UI code. Does not react to code changes (i.e. runs `pnpm start` and `cypress open`)
|
||||
- `pnpm test:e2e:dev`: Start n8n in development mode and run e2e tests interactively. Reacts to code changes (i.e. runs `pnpm dev` and `cypress open`)
|
||||
- `pnpm test:e2e:all`: Start n8n and run e2e tests headless (i.e. runs `pnpm start` and `cypress run --headless`)
|
||||
|
||||
⚠️ Remember to stop your dev server before. Otherwise port binding will fail.
|
||||
|
||||
## Releasing
|
||||
|
||||
To start a release, trigger [this workflow](https://github.com/n8n-io/n8n/actions/workflows/release-create-pr.yml) with the SemVer release type, and select a branch to cut this release from. This workflow will then:
|
||||
|
||||
1. Bump versions of packages that have changed or have dependencies that have changed
|
||||
2. Update the Changelog
|
||||
3. Create a new branch called `release/${VERSION}`, and
|
||||
4. Create a new pull-request to track any further changes that need to be included in this release
|
||||
|
||||
Once ready to release, simply merge the pull-request.
|
||||
This triggers [another workflow](https://github.com/n8n-io/n8n/actions/workflows/release-publish.yml), that will:
|
||||
|
||||
1. Build and publish the packages that have a new version in this release
|
||||
2. Create a new tag, and GitHub release from squashed release commit
|
||||
3. Merge the squashed release commit back into `master`
|
||||
|
||||
## Create custom nodes
|
||||
|
||||
Learn about [building nodes](https://docs.n8n.io/integrations/creating-nodes/) to create custom nodes for n8n. You can create community nodes and make them available using [npm](https://www.npmjs.com/).
|
||||
|
||||
## Extend documentation
|
||||
|
||||
The repository for the n8n documentation on [docs.n8n.io](https://docs.n8n.io) can be found [here](https://github.com/n8n-io/n8n-docs).
|
||||
|
||||
## Contribute workflow templates
|
||||
|
||||
You can submit your workflows to n8n's template library.
|
||||
|
||||
n8n is working on a creator program, and developing a marketplace of templates. This is an ongoing project, and details are likely to change.
|
||||
|
||||
Refer to [n8n Creator hub](https://www.notion.so/n8n/n8n-Creator-hub-7bd2cbe0fce0449198ecb23ff4a2f76f) for information on how to submit templates and become a creator.
|
||||
|
||||
## Contributor License Agreement
|
||||
|
||||
So that we do not run into any potential problems later, it is sadly necessary to sign a [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md). That can be done literally with the push of a button.
|
||||
|
||||
We use the simplest one that exists. It is from [Indie Open Source](https://indieopensource.com/forms/cla), which uses plain English and is literally only a few lines long.
|
||||
|
||||
Once a pull request is opened, an automated bot will promptly leave a comment requesting the agreement to be signed. The pull request can only be merged once the signature is obtained.
|
||||
5  n8n-n8n-1.109.2/CONTRIBUTOR_LICENSE_AGREEMENT 2.md  Executable file
@@ -0,0 +1,5 @@
|
||||
# n8n Contributor License Agreement
|
||||
|
||||
I give n8n permission to license my contributions on any terms they like. I am giving them this license in order to make it possible for them to accept my contributions into their project.
|
||||
|
||||
**_As far as the law allows, my contributions come as is, without any warranty or condition, and I will not be liable to anyone for any damages related to this software or this license, under any kind of legal claim._**
|
||||
88  n8n-n8n-1.109.2/LICENSE 2.md  Executable file
@@ -0,0 +1,88 @@
|
||||
# License
|
||||
|
||||
Portions of this software are licensed as follows:
|
||||
|
||||
- Content of branches other than the main branch (i.e. "master") are not licensed.
|
||||
- Source code files that contain ".ee." in their filename or ".ee" in their dirname are NOT licensed under
|
||||
the Sustainable Use License.
|
||||
To use source code files that contain ".ee." in their filename or ".ee" in their dirname you must hold a
|
||||
valid n8n Enterprise License specifically allowing you access to such source code files and as defined
|
||||
in "LICENSE_EE.md".
|
||||
- All third party components incorporated into the n8n Software are licensed under the original license
|
||||
provided by the owner of the applicable component.
|
||||
- Content outside of the above mentioned files or restrictions is available under the "Sustainable Use
|
||||
License" as defined below.
|
||||
|
||||
## Sustainable Use License
|
||||
|
||||
Version 1.0
|
||||
|
||||
### Acceptance
|
||||
|
||||
By using the software, you agree to all of the terms and conditions below.
|
||||
|
||||
### Copyright License
|
||||
|
||||
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license
|
||||
to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject
|
||||
to the limitations below.
|
||||
|
||||
### Limitations
|
||||
|
||||
You may use or modify the software only for your own internal business purposes or for non-commercial or
|
||||
personal use. You may distribute the software or provide it to others only if you do so free of charge for
|
||||
non-commercial purposes. You may not alter, remove, or obscure any licensing, copyright, or other notices of
|
||||
the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
|
||||
|
||||
### Patents
|
||||
|
||||
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to
|
||||
license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case
|
||||
subject to the limitations and conditions in this license. This license does not cover any patent claims that
|
||||
you cause to be infringed by modifications or additions to the software. If you or your company make any
|
||||
written claim that the software infringes or contributes to infringement of any patent, your patent license
|
||||
for the software granted under these terms ends immediately. If your company makes such a claim, your patent
|
||||
license ends immediately for work on behalf of your company.
|
||||
|
||||
### Notices
|
||||
|
||||
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these
|
||||
terms. If you modify the software, you must include in any modified copies of the software a prominent notice
|
||||
stating that you have modified the software.
|
||||
|
||||
### No Other Rights
|
||||
|
||||
These terms do not imply any licenses other than those expressly granted in these terms.
|
||||
|
||||
### Termination
|
||||
|
||||
If you use the software in violation of these terms, such use is not licensed, and your license will
|
||||
automatically terminate. If the licensor provides you with a notice of your violation, and you cease all
|
||||
violation of this license no later than 30 days after you receive that notice, your license will be reinstated
|
||||
retroactively. However, if you violate these terms after such reinstatement, any additional violation of these
|
||||
terms will cause your license to terminate automatically and permanently.
|
||||
|
||||
### No Liability
|
||||
|
||||
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will
|
||||
not be liable to you for any damages arising out of these terms or the use or nature of the software, under
|
||||
any kind of legal claim.
|
||||
|
||||
### Definitions
|
||||
|
||||
The “licensor” is the entity offering these terms.
|
||||
|
||||
The “software” is the software the licensor makes available under these terms, including any portion of it.
|
||||
|
||||
“You” refers to the individual or entity agreeing to these terms.
|
||||
|
||||
“Your company” is any legal entity, sole proprietorship, or other kind of organization that you work for, plus
|
||||
all organizations that have control over, are under the control of, or are under common control with that
|
||||
organization. Control means ownership of substantially all the assets of an entity, or the power to direct its
|
||||
management and policies by vote, contract, or otherwise. Control can be direct or indirect.
|
||||
|
||||
“Your license” is the license granted to you for the software under these terms.
|
||||
|
||||
“Use” means anything you do with the software requiring your license.
|
||||
|
||||
“Trademark” means trademarks, service marks, and similar rights.
|
||||
27  n8n-n8n-1.109.2/LICENSE_EE 2.md  Executable file
@@ -0,0 +1,27 @@
|
||||
# The n8n Enterprise License (the “Enterprise License”)
|
||||
|
||||
Copyright (c) 2022-present n8n GmbH.
|
||||
|
||||
With regard to the n8n Software:
|
||||
|
||||
This software and associated documentation files (the "Software") may only be used in production, if
|
||||
you (and any entity that you represent) hold a valid n8n Enterprise license corresponding to your
|
||||
usage. Subject to the foregoing sentence, you are free to modify this Software and publish patches
|
||||
to the Software. You agree that n8n and/or its licensors (as applicable) retain all right, title and
|
||||
interest in and to all such modifications and/or patches, and all such modifications and/or patches
|
||||
may only be used, copied, modified, displayed, distributed, or otherwise exploited with a valid n8n
|
||||
Enterprise license for the corresponding usage. Notwithstanding the foregoing, you may copy and
|
||||
modify the Software for development and testing purposes, without requiring a subscription. You
|
||||
agree that n8n and/or its licensors (as applicable) retain all right, title and interest in and to
|
||||
all such modifications. You are not granted any other rights beyond what is expressly stated herein.
|
||||
Subject to the foregoing, it is forbidden to copy, merge, publish, distribute, sublicense, and/or
|
||||
sell the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
|
||||
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
|
||||
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
For all third party components incorporated into the n8n Software, those components are licensed
|
||||
under the original license provided by the owner of the applicable component.
|
||||
72  n8n-n8n-1.109.2/README 2.md  Executable file
@@ -0,0 +1,72 @@
|
||||

|
||||
|
||||
# n8n - Secure Workflow Automation for Technical Teams
|
||||
|
||||
n8n is a workflow automation platform that gives technical teams the flexibility of code with the speed of no-code. With 400+ integrations, native AI capabilities, and a fair-code license, n8n lets you build powerful automations while maintaining full control over your data and deployments.
|
||||
|
||||

|
||||
|
||||
## Key Capabilities
|
||||
|
||||
- **Code When You Need It**: Write JavaScript/Python, add npm packages, or use the visual interface
|
||||
- **AI-Native Platform**: Build AI agent workflows based on LangChain with your own data and models
|
||||
- **Full Control**: Self-host with our fair-code license or use our [cloud offering](https://app.n8n.cloud/login)
|
||||
- **Enterprise-Ready**: Advanced permissions, SSO, and air-gapped deployments
|
||||
- **Active Community**: 400+ integrations and 900+ ready-to-use [templates](https://n8n.io/workflows)
|
||||
|
||||
## Quick Start
|
||||
|
||||
Try n8n instantly with [npx](https://docs.n8n.io/hosting/installation/npm/) (requires [Node.js](https://nodejs.org/en/)):
|
||||
|
||||
```
|
||||
npx n8n
|
||||
```
|
||||
|
||||
Or deploy with [Docker](https://docs.n8n.io/hosting/installation/docker/):
|
||||
|
||||
```
|
||||
docker volume create n8n_data
|
||||
docker run -it --rm --name n8n -p 5678:5678 -v n8n_data:/home/node/.n8n docker.n8n.io/n8nio/n8n
|
||||
```
|
||||
|
||||
Access the editor at http://localhost:5678
|
||||
|
||||
## Resources
|
||||
|
||||
- 📚 [Documentation](https://docs.n8n.io)
|
||||
- 🔧 [400+ Integrations](https://n8n.io/integrations)
|
||||
- 💡 [Example Workflows](https://n8n.io/workflows)
|
||||
- 🤖 [AI & LangChain Guide](https://docs.n8n.io/langchain/)
|
||||
- 👥 [Community Forum](https://community.n8n.io)
|
||||
- 📖 [Community Tutorials](https://community.n8n.io/c/tutorials/28)
|
||||
|
||||
## Support
|
||||
|
||||
Need help? Our community forum is the place to get support and connect with other users:
|
||||
[community.n8n.io](https://community.n8n.io)
|
||||
|
||||
## License
|
||||
|
||||
n8n is [fair-code](https://faircode.io) distributed under the [Sustainable Use License](https://github.com/n8n-io/n8n/blob/master/LICENSE.md) and [n8n Enterprise License](https://github.com/n8n-io/n8n/blob/master/LICENSE_EE.md).
|
||||
|
||||
- **Source Available**: Always visible source code
|
||||
- **Self-Hostable**: Deploy anywhere
|
||||
- **Extensible**: Add your own nodes and functionality
|
||||
|
||||
[Enterprise licenses](mailto:license@n8n.io) available for additional features and support.
|
||||
|
||||
Additional information about the license model can be found in the [docs](https://docs.n8n.io/reference/license/).
|
||||
|
||||
## Contributing
|
||||
|
||||
Found a bug 🐛 or have a feature idea ✨? Check our [Contributing Guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md) to get started.
|
||||
|
||||
## Join the Team
|
||||
|
||||
Want to shape the future of automation? Check out our [job posts](https://n8n.io/careers) and join our team!
|
||||
|
||||
## What does n8n mean?
|
||||
|
||||
**Short answer:** It means "nodemation" and is pronounced as n-eight-n.
|
||||
|
||||
**Long answer:** "I get that question quite often (more often than I expected) so I decided it is probably best to answer it here. While looking for a good name for the project with a free domain I realized very quickly that all the good ones I could think of were already taken. So, in the end, I chose nodemation. 'node-' in the sense that it uses a Node-View and that it uses Node.js and '-mation' for 'automation' which is what the project is supposed to help with. However, I did not like how long the name was and I could not imagine writing something that long every time in the CLI. That is when I then ended up on 'n8n'." - **Jan Oberhauser, Founder and CEO, n8n.io**
|
||||
4  n8n-n8n-1.109.2/SECURITY 2.md  Executable file
@@ -0,0 +1,4 @@
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please report (suspected) security vulnerabilities to **[security@n8n.io](mailto:security@n8n.io)**. You will receive a response from
|
||||
us within 48 hours. If the issue is confirmed, we will release a patch as soon as possible depending on complexity but historically within a few days.
|
||||
BIN  n8n-n8n-1.109.2/assets/n8n-logo 2.png  Executable file (binary file not shown; 7.7 KiB)
BIN  n8n-n8n-1.109.2/assets/n8n-screenshot 2.png  Executable file (binary file not shown; 224 KiB)
BIN  n8n-n8n-1.109.2/assets/n8n-screenshot-readme 2.png  Executable file (binary file not shown; 98 KiB)
54  n8n-n8n-1.109.2/biome 2.jsonc  Executable file
@@ -0,0 +1,54 @@
|
||||
{
|
||||
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
|
||||
"vcs": {
|
||||
"clientKind": "git",
|
||||
"enabled": true,
|
||||
"useIgnoreFile": true
|
||||
},
|
||||
"files": {
|
||||
"ignore": [
|
||||
"**/.turbo",
|
||||
"**/components.d.ts",
|
||||
"**/coverage",
|
||||
"**/dist",
|
||||
"**/package.json",
|
||||
"**/pnpm-lock.yaml",
|
||||
"**/CHANGELOG.md",
|
||||
"**/cl100k_base.json",
|
||||
"**/o200k_base.json"
|
||||
]
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"formatWithErrors": false,
|
||||
"indentStyle": "tab",
|
||||
"indentWidth": 2,
|
||||
"lineEnding": "lf",
|
||||
"lineWidth": 100,
|
||||
"attributePosition": "auto",
|
||||
"ignore": [
|
||||
// Handled by prettier
|
||||
"**/*.vue"
|
||||
]
|
||||
},
|
||||
"organizeImports": { "enabled": false },
|
||||
"linter": {
|
||||
"enabled": false
|
||||
},
|
||||
"javascript": {
|
||||
"parser": {
|
||||
"unsafeParameterDecoratorsEnabled": true
|
||||
},
|
||||
"formatter": {
|
||||
"jsxQuoteStyle": "double",
|
||||
"quoteProperties": "asNeeded",
|
||||
"trailingCommas": "all",
|
||||
"semicolons": "always",
|
||||
"arrowParentheses": "always",
|
||||
"bracketSpacing": true,
|
||||
"bracketSameLine": false,
|
||||
"quoteStyle": "single",
|
||||
"attributePosition": "auto"
|
||||
}
|
||||
}
|
||||
}
|
||||
69  n8n-n8n-1.109.2/codecov 2.yml  Executable file
@@ -0,0 +1,69 @@
|
||||
codecov:
|
||||
max_report_age: off
|
||||
require_ci_to_pass: true
|
||||
|
||||
coverage:
|
||||
status:
|
||||
patch: false
|
||||
project:
|
||||
default:
|
||||
threshold: 0.5
|
||||
|
||||
github_checks:
|
||||
annotations: false
|
||||
|
||||
flags:
|
||||
tests:
|
||||
paths:
|
||||
- '**'
|
||||
carryforward: true
|
||||
|
||||
component_management:
|
||||
default_rules:
|
||||
statuses:
|
||||
- type: project
|
||||
target: auto
|
||||
branches:
|
||||
- '!master'
|
||||
individual_components:
|
||||
- component_id: backend_packages
|
||||
name: Backend
|
||||
paths:
|
||||
- packages/@n8n/ai-workflow-builder.ee/**
|
||||
- packages/@n8n/api-types/**
|
||||
- packages/@n8n/config/**
|
||||
- packages/@n8n/client-oauth2/**
|
||||
- packages/@n8n/decorators/**
|
||||
- packages/@n8n/constants/**
|
||||
- packages/@n8n/backend-common/**
|
||||
- packages/@n8n/backend-test-utils/**
|
||||
- packages/@n8n/errors/**
|
||||
- packages/@n8n/db/**
|
||||
- packages/@n8n/di/**
|
||||
- packages/@n8n/imap/**
|
||||
- packages/@n8n/permissions/**
|
||||
- packages/@n8n/task-runner/**
|
||||
- packages/workflow/**
|
||||
- packages/core/**
|
||||
- packages/cli/**
|
||||
- component_id: frontend_packages
|
||||
name: Frontend
|
||||
paths:
|
||||
- packages/@n8n/codemirror-lang/**
|
||||
- packages/frontend/**
|
||||
- component_id: nodes_packages
|
||||
name: Nodes
|
||||
paths:
|
||||
- packages/node-dev/**
|
||||
- packages/nodes-base/**
|
||||
- packages/@n8n/json-schema-to-zod/**
|
||||
- packages/@n8n/nodes-langchain/**
|
||||
statuses:
|
||||
- type: project
|
||||
target: auto
|
||||
threshold: 0% # Enforce: Coverage must not decrease
|
||||
|
||||
ignore:
|
||||
- (?s:.*/[^\/]*\.spec\.ts.*)\Z
|
||||
- (?s:.*/[^\/]*\.test\.ts.*)\Z
|
||||
- (?s:.*/[^\/]*e2e[^\/]*\.ts.*)\Z
|
||||
3  n8n-n8n-1.109.2/cypress/.gitignore 2  Executable file
@@ -0,0 +1,3 @@
|
||||
videos/
|
||||
screenshots/
|
||||
downloads/
|
||||
32  n8n-n8n-1.109.2/cypress/README 2.md  Executable file
@@ -0,0 +1,32 @@
|
||||
## Debugging Flaky End-to-End Tests - Usage
|
||||
|
||||
To debug flaky end-to-end (E2E) tests, use the following command:
|
||||
|
||||
```bash
|
||||
pnpm run debug:flaky:e2e -- <grep_filter> <burn_count>
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `<grep_filter>`: (Optional) A string to filter tests by their `it()` or `describe()` block titles, or by tags if using the `@cypress/grep` plugin. If omitted, all tests will be run.
|
||||
* `<burn_count>`: (Optional) The number of times to run the filtered tests. Defaults to 5 if not provided.
|
||||
|
||||
**Examples:**
|
||||
|
||||
1. **Run all tests tagged with `CAT-726` ten times:**
|
||||
|
||||
```bash
|
||||
pnpm run debug:flaky:e2e CAT-726 10
|
||||
```
|
||||
|
||||
2. **Run all tests containing "login" five times (default burn count):**
|
||||
|
||||
```bash
|
||||
pnpm run debug:flaky:e2e login
|
||||
```
|
||||
|
||||
3. **Run all tests five times (default grep and burn count):**
|
||||
|
||||
```bash
|
||||
pnpm run debug:flaky:e2e
|
||||
```
|
||||
4  n8n-n8n-1.109.2/cypress/augmentation.d 2.ts  Executable file
@@ -0,0 +1,4 @@
|
||||
declare module 'cypress-otp' {
|
||||
// eslint-disable-next-line import-x/no-default-export
|
||||
export default function generateOTPToken(secret: string): string;
|
||||
}
|
||||
7  n8n-n8n-1.109.2/cypress/biome 2.jsonc  Executable file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"$schema": "../node_modules/@biomejs/biome/configuration_schema.json",
|
||||
"extends": ["../biome.jsonc"],
|
||||
"formatter": {
|
||||
"ignore": ["fixtures/**"]
|
||||
}
|
||||
}
|
||||
76  n8n-n8n-1.109.2/cypress/constants 2.ts  Executable file
@@ -0,0 +1,76 @@
|
||||
import { randFirstName, randLastName } from '@ngneat/falso';
|
||||
|
||||
export const BACKEND_BASE_URL = 'http://localhost:5678';
|
||||
export const N8N_AUTH_COOKIE = 'n8n-auth';
|
||||
|
||||
const DEFAULT_USER_PASSWORD = 'CypressTest123';
|
||||
|
||||
export const INSTANCE_OWNER = {
|
||||
email: 'nathan@n8n.io',
|
||||
password: DEFAULT_USER_PASSWORD,
|
||||
firstName: randFirstName(),
|
||||
lastName: randLastName(),
|
||||
};
|
||||
|
||||
export const INSTANCE_ADMIN = {
|
||||
email: 'admin@n8n.io',
|
||||
password: DEFAULT_USER_PASSWORD,
|
||||
firstName: randFirstName(),
|
||||
lastName: randLastName(),
|
||||
};
|
||||
|
||||
export const INSTANCE_MEMBERS = [
|
||||
{
|
||||
email: 'rebecca@n8n.io',
|
||||
password: DEFAULT_USER_PASSWORD,
|
||||
firstName: randFirstName(),
|
||||
lastName: randLastName(),
|
||||
},
|
||||
{
|
||||
email: 'mustafa@n8n.io',
|
||||
password: DEFAULT_USER_PASSWORD,
|
||||
firstName: randFirstName(),
|
||||
lastName: randLastName(),
|
||||
},
|
||||
];
|
||||
|
||||
export const MANUAL_TRIGGER_NODE_NAME = 'Manual Trigger';
|
||||
export const MANUAL_TRIGGER_NODE_DISPLAY_NAME = 'When clicking ‘Execute workflow’';
|
||||
export const MANUAL_CHAT_TRIGGER_NODE_NAME = 'Chat Trigger';
|
||||
export const CHAT_TRIGGER_NODE_DISPLAY_NAME = 'When chat message received';
|
||||
export const SCHEDULE_TRIGGER_NODE_NAME = 'Schedule Trigger';
|
||||
export const CODE_NODE_NAME = 'Code';
|
||||
export const SET_NODE_NAME = 'Set';
|
||||
export const EDIT_FIELDS_SET_NODE_NAME = 'Edit Fields';
|
||||
export const LOOP_OVER_ITEMS_NODE_NAME = 'Loop Over Items';
|
||||
export const IF_NODE_NAME = 'If';
|
||||
export const MERGE_NODE_NAME = 'Merge';
|
||||
export const SWITCH_NODE_NAME = 'Switch';
|
||||
export const GMAIL_NODE_NAME = 'Gmail';
|
||||
export const TRELLO_NODE_NAME = 'Trello';
|
||||
export const NOTION_NODE_NAME = 'Notion';
|
||||
export const PIPEDRIVE_NODE_NAME = 'Pipedrive';
|
||||
export const HTTP_REQUEST_NODE_NAME = 'HTTP Request';
|
||||
export const AGENT_NODE_NAME = 'AI Agent';
|
||||
export const BASIC_LLM_CHAIN_NODE_NAME = 'Basic LLM Chain';
|
||||
export const AI_MEMORY_WINDOW_BUFFER_MEMORY_NODE_NAME = 'Simple Memory';
|
||||
export const AI_TOOL_CALCULATOR_NODE_NAME = 'Calculator';
|
||||
export const AI_TOOL_CODE_NODE_NAME = 'Code Tool';
|
||||
export const AI_TOOL_WIKIPEDIA_NODE_NAME = 'Wikipedia';
|
||||
export const AI_TOOL_HTTP_NODE_NAME = 'HTTP Request Tool';
|
||||
export const AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME = 'OpenAI Chat Model';
|
||||
export const AI_MEMORY_POSTGRES_NODE_NAME = 'Postgres Chat Memory';
|
||||
export const AI_OUTPUT_PARSER_AUTO_FIXING_NODE_NAME = 'Auto-fixing Output Parser';
|
||||
export const WEBHOOK_NODE_NAME = 'Webhook';
|
||||
export const EXECUTE_WORKFLOW_NODE_NAME = 'Execute Workflow';
|
||||
|
||||
export const META_KEY = Cypress.platform === 'darwin' ? 'meta' : 'ctrl';
|
||||
|
||||
export const NEW_GOOGLE_ACCOUNT_NAME = 'Gmail account';
|
||||
export const NEW_TRELLO_ACCOUNT_NAME = 'Trello account';
|
||||
export const NEW_NOTION_ACCOUNT_NAME = 'Notion account';
|
||||
export const NEW_QUERY_AUTH_ACCOUNT_NAME = 'Query Auth account';
|
||||
|
||||
export const ROUTES = {
|
||||
NEW_WORKFLOW_PAGE: '/workflow/new',
|
||||
};
|
||||
38  n8n-n8n-1.109.2/cypress/cypress.config 2.js  Executable file
@@ -0,0 +1,38 @@
|
||||
const { defineConfig } = require('cypress');
|
||||
|
||||
const BASE_URL = 'http://localhost:5678';
|
||||
|
||||
module.exports = defineConfig({
|
||||
projectId: '5hbsdn',
|
||||
retries: {
|
||||
openMode: 0,
|
||||
runMode: 2,
|
||||
},
|
||||
defaultCommandTimeout: 10000,
|
||||
requestTimeout: 12000,
|
||||
numTestsKeptInMemory: 2,
|
||||
experimentalMemoryManagement: true,
|
||||
e2e: {
|
||||
baseUrl: BASE_URL,
|
||||
viewportWidth: 1536,
|
||||
viewportHeight: 960,
|
||||
video: true,
|
||||
screenshotOnRunFailure: true,
|
||||
experimentalInteractiveRunEvents: true,
|
||||
experimentalSessionAndOrigin: true,
|
||||
specPattern: 'e2e/**/*.ts',
|
||||
supportFile: 'support/e2e.ts',
|
||||
fixturesFolder: 'fixtures',
|
||||
downloadsFolder: 'downloads',
|
||||
screenshotsFolder: 'screenshots',
|
||||
videosFolder: 'videos',
|
||||
setupNodeEvents(on, config) {
|
||||
require('@cypress/grep/src/plugin')(config);
|
||||
return config;
|
||||
},
|
||||
},
|
||||
reporter: 'mocha-junit-reporter',
|
||||
reporterOptions: {
|
||||
mochaFile: 'test-results-[hash].xml',
|
||||
},
|
||||
});
|
||||
39  n8n-n8n-1.109.2/cypress/eslint.config 2.mjs  Executable file
@@ -0,0 +1,39 @@
|
||||
import { defineConfig, globalIgnores } from 'eslint/config';
|
||||
import { baseConfig } from '@n8n/eslint-config/base';
|
||||
import cypressPlugin from 'eslint-plugin-cypress/flat';
|
||||
|
||||
export default defineConfig(
|
||||
globalIgnores(['scripts/**/*.js']),
|
||||
baseConfig,
|
||||
cypressPlugin.configs.recommended,
|
||||
{
|
||||
rules: {
|
||||
// TODO: Remove this
|
||||
'no-useless-escape': 'warn',
|
||||
'import-x/order': 'warn',
|
||||
'import-x/no-extraneous-dependencies': [
|
||||
'error',
|
||||
{
|
||||
devDependencies: ['**/cypress/**'],
|
||||
optionalDependencies: false,
|
||||
},
|
||||
],
|
||||
'@typescript-eslint/naming-convention': 'warn',
|
||||
'@typescript-eslint/no-explicit-any': 'warn',
|
||||
'@typescript-eslint/no-unsafe-argument': 'warn',
|
||||
'@typescript-eslint/no-unsafe-assignment': 'warn',
|
||||
'@typescript-eslint/no-unsafe-call': 'warn',
|
||||
'@typescript-eslint/no-unsafe-member-access': 'warn',
|
||||
'@typescript-eslint/no-unsafe-return': 'warn',
|
||||
'@typescript-eslint/no-unused-expressions': 'warn',
|
||||
'@typescript-eslint/no-use-before-define': 'warn',
|
||||
'@typescript-eslint/promise-function-async': 'warn',
|
||||
'@typescript-eslint/prefer-nullish-coalescing': 'warn',
|
||||
'@typescript-eslint/unbound-method': 'warn',
|
||||
'cypress/no-assigning-return-values': 'warn',
|
||||
'cypress/no-unnecessary-waiting': 'warn',
|
||||
'cypress/unsafe-to-chain-command': 'warn',
|
||||
'n8n-local-rules/no-uncaught-json-parse': 'warn',
|
||||
},
|
||||
},
|
||||
);
|
||||
37  n8n-n8n-1.109.2/cypress/package 2.json  Executable file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "n8n-cypress",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"typecheck": "tsc --noEmit",
|
||||
"cypress:install": "cypress install",
|
||||
"test:e2e:ui": "scripts/run-e2e.js ui",
|
||||
"test:e2e:dev": "scripts/run-e2e.js dev",
|
||||
"test:e2e:all": "scripts/run-e2e.js all",
|
||||
"test:flaky": "scripts/run-e2e.js debugFlaky",
|
||||
"format": "biome format --write .",
|
||||
"format:check": "biome ci .",
|
||||
"lint": "eslint . --quiet",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"develop": "cd ..; pnpm dev:e2e:server",
|
||||
"start": "cd ..; pnpm start"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cypress/grep": "^4.1.0",
|
||||
"@n8n/api-types": "workspace:*",
|
||||
"@types/lodash": "catalog:",
|
||||
"eslint-plugin-cypress": "^4.3.0",
|
||||
"mocha-junit-reporter": "^2.2.1",
|
||||
"n8n-workflow": "workspace:*"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ngneat/falso": "^7.3.0",
|
||||
"@sinonjs/fake-timers": "^13.0.2",
|
||||
"cypress": "^14.4.0",
|
||||
"cypress-otp": "^1.0.3",
|
||||
"cypress-real-events": "^1.14.0",
|
||||
"flatted": "catalog:",
|
||||
"lodash": "catalog:",
|
||||
"nanoid": "catalog:",
|
||||
"start-server-and-test": "^2.0.10"
|
||||
}
|
||||
}
|
||||
28  n8n-n8n-1.109.2/n8n-20250908-082052.log  Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
> n8n-monorepo@1.109.2 start /Users/xiaoqi/Documents/Dev/Project/2025-09-08_n8nDEMO演示/n8n-n8n-1.109.2
|
||||
> run-script-os
|
||||
|
||||
|
||||
> n8n-monorepo@1.109.2 start:default
|
||||
> cd packages/cli/bin && ./n8n
|
||||
|
||||
Permissions 0644 for n8n settings file /Users/xiaoqi/.n8n/config are too wide. This is ignored for now, but in the future n8n will attempt to change the permissions automatically. To automatically enforce correct permissions now set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=true (recommended), or turn this check off set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=false.
|
||||
Initializing n8n process
|
||||
n8n ready on ::, port 5678
|
||||
n8n Task Broker ready on 127.0.0.1, port 5679
|
||||
Initializing AuthRolesService...
|
||||
AuthRolesService initialized successfully.
|
||||
[license SDK] Skipping renewal on init: license cert is not initialized
|
||||
Registered runner "JS Task Runner" (KVrQI2zeKG9cyCrchj-ED)
|
||||
Version: 1.109.2
|
||||
Locale: zh-CN
|
||||
|
||||
Editor is now accessible via:
|
||||
http://localhost:5678
|
||||
(node:10762) [DEP0060] DeprecationWarning: The `util._extend` API is deprecated. Please use Object.assign() instead.
|
||||
(Use `node --trace-deprecation ...` to show where the warning was created)
|
||||
Received SIGINT. Shutting down...
|
||||
[Task Runner]: Received SIGINT signal, shutting down...
|
||||
[Task Runner]: Task runner stopped
|
||||
|
||||
Stopping n8n...
|
||||
28  n8n-n8n-1.109.2/n8n-20250908-093104.log  Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
> n8n-monorepo@1.109.2 start /Users/xiaoqi/Documents/Dev/Project/2025-09-08_n8nDEMO演示/n8n-n8n-1.109.2
|
||||
> run-script-os
|
||||
|
||||
|
||||
> n8n-monorepo@1.109.2 start:default
|
||||
> cd packages/cli/bin && ./n8n
|
||||
|
||||
Permissions 0644 for n8n settings file /Users/xiaoqi/.n8n/config are too wide. This is ignored for now, but in the future n8n will attempt to change the permissions automatically. To automatically enforce correct permissions now set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=true (recommended), or turn this check off set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=false.
|
||||
Initializing n8n process
|
||||
n8n ready on ::, port 5678
|
||||
n8n Task Broker ready on 127.0.0.1, port 5679
|
||||
Initializing AuthRolesService...
|
||||
AuthRolesService initialized successfully.
|
||||
[license SDK] Skipping renewal on init: license cert is not initialized
|
||||
Registered runner "JS Task Runner" (0dM4jDG2u7oqKGfrgBCiV)
|
||||
Version: 1.109.2
|
||||
Locale: zh-CN
|
||||
|
||||
Editor is now accessible via:
|
||||
http://localhost:5678
|
||||
(node:30364) [DEP0060] DeprecationWarning: The `util._extend` API is deprecated. Please use Object.assign() instead.
|
||||
(Use `node --trace-deprecation ...` to show where the warning was created)
|
||||
Received SIGINT. Shutting down...
|
||||
[Task Runner]: Received SIGINT signal, shutting down...
|
||||
[Task Runner]: Task runner stopped
|
||||
|
||||
Stopping n8n...
|
||||
23  n8n-n8n-1.109.2/n8n-20250908-101702.log  Normal file
@@ -0,0 +1,23 @@
|
||||
|
||||
> n8n-monorepo@1.109.2 start /Users/xiaoqi/Documents/Dev/Project/2025-09-08_n8nDEMO演示/n8n-n8n-1.109.2
|
||||
> run-script-os
|
||||
|
||||
|
||||
> n8n-monorepo@1.109.2 start:default
|
||||
> cd packages/cli/bin && ./n8n
|
||||
|
||||
Permissions 0644 for n8n settings file /Users/xiaoqi/.n8n/config are too wide. This is ignored for now, but in the future n8n will attempt to change the permissions automatically. To automatically enforce correct permissions now set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=true (recommended), or turn this check off set N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=false.
|
||||
Initializing n8n process
|
||||
n8n ready on ::, port 5678
|
||||
n8n Task Broker ready on 127.0.0.1, port 5679
|
||||
Initializing AuthRolesService...
|
||||
AuthRolesService initialized successfully.
|
||||
[license SDK] Skipping renewal on init: license cert is not initialized
|
||||
Registered runner "JS Task Runner" (DcMjBKt67OYRSB7Yae1vX)
|
||||
Version: 1.109.2
|
||||
Locale: zh-CN
|
||||
|
||||
Editor is now accessible via:
|
||||
http://localhost:5678
|
||||
(node:31940) [DEP0060] DeprecationWarning: The `util._extend` API is deprecated. Please use Object.assign() instead.
|
||||
(Use `node --trace-deprecation ...` to show where the warning was created)
|
||||
205  n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/evaluations/README 2.md  Executable file
@@ -0,0 +1,205 @@
|
||||
# AI Workflow Builder Evaluations
|
||||
|
||||
This module provides an evaluation framework for testing the AI Workflow Builder's ability to generate correct n8n workflows from natural language prompts.
|
||||
|
||||

## Architecture Overview

The evaluation system is split into two distinct modes:
1. **CLI Evaluation** - Runs predefined test cases locally with progress tracking
2. **Langsmith Evaluation** - Integrates with Langsmith for dataset-based evaluation and experiment tracking

### Directory Structure

```
evaluations/
├── cli/                          # CLI evaluation implementation
│   ├── runner.ts                 # Main CLI evaluation orchestrator
│   └── display.ts                # Console output and progress tracking
├── langsmith/                    # Langsmith integration
│   ├── evaluator.ts              # Langsmith-compatible evaluator function
│   └── runner.ts                 # Langsmith evaluation orchestrator
├── core/                         # Shared evaluation logic
│   ├── environment.ts            # Test environment setup and configuration
│   └── test-runner.ts            # Core test execution logic
├── types/                        # Type definitions
│   ├── evaluation.ts             # Evaluation result schemas
│   ├── test-result.ts            # Test result interfaces
│   └── langsmith.ts              # Langsmith-specific types and guards
├── chains/                       # LLM evaluation chains
│   ├── test-case-generator.ts    # Dynamic test case generation
│   └── workflow-evaluator.ts     # LLM-based workflow evaluation
├── utils/                        # Utility functions
│   ├── evaluation-calculator.ts  # Metrics calculation
│   ├── evaluation-helpers.ts     # Common helper functions
│   └── evaluation-reporter.ts    # Report generation
└── index.ts                      # Main entry point
```

## Implementation Details

### Core Components

#### 1. Test Runner (`core/test-runner.ts`)

The core test runner handles individual test execution:
- Generates workflows using the WorkflowBuilderAgent
- Validates generated workflows using type guards
- Evaluates workflows against test criteria
- Returns structured test results with error handling

#### 2. Environment Setup (`core/environment.ts`)

Centralizes environment configuration:
- LLM initialization with API key validation
- Langsmith client setup
- Node types loading
- Concurrency and test generation settings

#### 3. Langsmith Integration

The Langsmith integration provides two key components:

**Evaluator (`langsmith/evaluator.ts`):**
- Converts Langsmith Run objects to evaluation inputs
- Validates all data using type guards before processing
- Safely extracts usage metadata without type coercion
- Returns structured evaluation results

**Runner (`langsmith/runner.ts`):**
- Creates workflow generation functions compatible with Langsmith
- Validates message content before processing
- Extracts usage metrics safely from message metadata
- Handles dataset verification and error reporting

#### 4. CLI Evaluation

The CLI evaluation provides local testing capabilities:

**Runner (`cli/runner.ts`):**
- Orchestrates parallel test execution with concurrency control
- Manages test case generation when enabled
- Generates detailed reports and saves results

**Display (`cli/display.ts`):**
- Progress bar management for real-time feedback
- Console output formatting
- Error display and reporting

### Evaluation Metrics

The system evaluates workflows across five categories:

1. **Functionality** (30% weight)
   - Does the workflow achieve the intended goal?
   - Are the right nodes selected?

2. **Connections** (25% weight)
   - Are nodes properly connected?
   - Is data flow logical?

3. **Expressions** (20% weight)
   - Are n8n expressions syntactically correct?
   - Do they reference valid data paths?

4. **Node Configuration** (15% weight)
   - Are node parameters properly set?
   - Are required fields populated?

5. **Structural Similarity** (10% weight, optional)
   - How closely does the structure match a reference workflow?
   - Only evaluated when a reference workflow is provided

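The weighted category scores are rolled up into a single overall score in `utils/evaluation-calculator.ts`. The exact implementation is not shown here; the following is only a minimal sketch of that kind of weighted combination, where the renormalization for the optional structural-similarity category is an assumption:

```typescript
// Sketch only: combines per-category scores (0..1) using the documented weights.
interface CategoryScores {
	functionality: number;
	connections: number;
	expressions: number;
	nodeConfiguration: number;
	structuralSimilarity?: number; // only present when a reference workflow exists
}

const WEIGHTS = {
	functionality: 0.3,
	connections: 0.25,
	expressions: 0.2,
	nodeConfiguration: 0.15,
	structuralSimilarity: 0.1,
} as const;

function overallScore(scores: CategoryScores): number {
	let weighted = 0;
	let totalWeight = 0;
	for (const [category, weight] of Object.entries(WEIGHTS)) {
		const value = scores[category as keyof CategoryScores];
		if (value === undefined) continue; // optional category was not evaluated
		weighted += value * weight;
		totalWeight += weight;
	}
	// Renormalize so skipping the optional category does not cap the score at 0.9 (assumption)
	return totalWeight > 0 ? weighted / totalWeight : 0;
}
```
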
### Violation Severity Levels

Violations are categorized by severity:
- **Critical** (-40 to -50 points): Workflow-breaking issues
- **Major** (-15 to -25 points): Significant problems affecting functionality
- **Minor** (-5 to -15 points): Non-critical issues or inefficiencies

## Running Evaluations

### CLI Evaluation

```bash
# Run with default settings
pnpm eval

# With additional generated test cases
GENERATE_TEST_CASES=true pnpm eval

# With custom concurrency
EVALUATION_CONCURRENCY=10 pnpm eval
```

### Langsmith Evaluation

```bash
# Set required environment variables
export LANGSMITH_API_KEY=your_api_key
# Optionally specify dataset
export LANGSMITH_DATASET_NAME=your_dataset_name

# Run evaluation
pnpm eval:langsmith
```

## Configuration

### Required Files

#### nodes.json

**IMPORTANT**: The evaluation framework requires a `nodes.json` file in the evaluations root directory (`evaluations/nodes.json`).

This file contains all n8n node type definitions and is used by the AI Workflow Builder agent to:
- Know what nodes are available in n8n
- Understand node parameters and their schemas
- Generate valid workflows with proper node configurations

**Why is this required?**
The AI Workflow Builder agent needs access to node definitions to generate workflows. In a normal n8n runtime, these definitions are loaded automatically. However, since the evaluation framework instantiates the agent without a running n8n instance, we must provide the node definitions manually via `nodes.json`.

**How to generate nodes.json:**
1. Run your n8n instance
2. Download the node definitions from the locally running n8n instance (http://localhost:5678/types/nodes.json)
3. Save the node definitions to `evaluations/nodes.json`

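For example, with an instance on the default port and `curl` available (adjust the host or port if your setup differs):

```bash
# Download node type definitions from the locally running n8n instance
curl -o evaluations/nodes.json http://localhost:5678/types/nodes.json
```
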
The evaluation will fail with a clear error message if `nodes.json` is missing.

### Environment Variables

- `N8N_AI_ANTHROPIC_KEY` - Required for LLM access
- `LANGSMITH_API_KEY` - Required for Langsmith evaluation
- `USE_LANGSMITH_EVAL` - Set to "true" to use Langsmith mode
- `LANGSMITH_DATASET_NAME` - Override default dataset name
- `EVALUATION_CONCURRENCY` - Number of parallel test executions (default: 5)
- `GENERATE_TEST_CASES` - Set to "true" to generate additional test cases
- `LLM_MODEL` - Model identifier for metadata tracking

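These variables are consumed by `core/environment.ts`; the actual parsing is not reproduced here. A minimal sketch of the kind of reading involved (only the concurrency default of 5 is documented, everything else is an assumption) might look like:

```typescript
// Sketch only: reads the documented environment variables with hedged defaults.
interface EvaluationEnv {
	anthropicKey: string;
	langsmithApiKey?: string;
	useLangsmith: boolean;
	datasetName?: string;
	concurrency: number;
	generateTestCases: boolean;
	llmModel?: string;
}

function readEvaluationEnv(env: NodeJS.ProcessEnv = process.env): EvaluationEnv {
	const anthropicKey = env.N8N_AI_ANTHROPIC_KEY;
	if (!anthropicKey) {
		throw new Error('N8N_AI_ANTHROPIC_KEY is required for LLM access');
	}
	return {
		anthropicKey,
		langsmithApiKey: env.LANGSMITH_API_KEY,
		useLangsmith: env.USE_LANGSMITH_EVAL === 'true',
		datasetName: env.LANGSMITH_DATASET_NAME,
		concurrency: Number(env.EVALUATION_CONCURRENCY ?? 5),
		generateTestCases: env.GENERATE_TEST_CASES === 'true',
		llmModel: env.LLM_MODEL,
	};
}
```
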
## Output

### CLI Evaluation Output

- **Console Display**: Real-time progress, test results, and summary statistics
- **Markdown Report**: `results/evaluation-report-[timestamp].md`
- **JSON Results**: `results/evaluation-results-[timestamp].json`

### Langsmith Evaluation Output

- Results are stored in the Langsmith dashboard
- Experiment name format: `workflow-builder-evaluation-[date]`
- Includes detailed metrics for each evaluation category

## Adding New Test Cases

Test cases are defined in `chains/test-case-generator.ts`. Each test case requires:
- `id`: Unique identifier
- `name`: Descriptive name
- `prompt`: Natural language description of the workflow to generate
- `referenceWorkflow` (optional): Expected workflow structure for comparison

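As a rough illustration only (field names taken from the list above, all values invented), a test case might look like:

```typescript
// Illustrative example; the real test case type lives in the evaluations package.
const exampleTestCase = {
	id: 'http-to-slack',
	name: 'Fetch an API and post the result to Slack',
	prompt: 'Every morning at 9am, fetch the top story from an API and post it to a Slack channel.',
	// referenceWorkflow: { nodes: [...], connections: {...} }, // optional; enables structural similarity scoring
};
```
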
## Extending the Framework

To add new evaluation metrics:
1. Update the `EvaluationResult` schema in `types/evaluation.ts`
2. Modify the evaluation logic in `chains/workflow-evaluator.ts`
3. Update the evaluator in `langsmith/evaluator.ts` to include new metrics
4. Adjust weight calculations in `utils/evaluation-calculator.ts`
27
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/evaluations/index 2.ts
Executable file
@@ -0,0 +1,27 @@
import { runCliEvaluation } from './cli/runner.js';
import { runLangsmithEvaluation } from './langsmith/runner.js';

// Re-export for external use if needed
export { runCliEvaluation } from './cli/runner.js';
export { runLangsmithEvaluation } from './langsmith/runner.js';
export { runSingleTest } from './core/test-runner.js';
export { setupTestEnvironment, createAgent } from './core/environment.js';

/**
 * Main entry point for evaluation
 * Determines which evaluation mode to run based on environment variables
 */
async function main(): Promise<void> {
	const useLangsmith = process.env.USE_LANGSMITH_EVAL === 'true';

	if (useLangsmith) {
		await runLangsmithEvaluation();
	} else {
		await runCliEvaluation();
	}
}

// Run if called directly
if (require.main === module) {
	main().catch(console.error);
}
106
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/evaluations/load-nodes 2.ts
Executable file
@@ -0,0 +1,106 @@
import { readFileSync, existsSync } from 'fs';
import { jsonParse, type INodeTypeDescription } from 'n8n-workflow';
import { join } from 'path';

interface NodeWithVersion extends INodeTypeDescription {
	version: number | number[];
	defaultVersion?: number;
}

export function loadNodesFromFile(): INodeTypeDescription[] {
	console.log('Loading nodes from nodes.json...');

	const nodesPath = join(__dirname, 'nodes.json');

	// Check if nodes.json exists
	if (!existsSync(nodesPath)) {
		const errorMessage = `
ERROR: nodes.json file not found at ${nodesPath}

The nodes.json file is required for evaluations to work properly.
Please ensure nodes.json is present in the evaluations root directory.

To generate nodes.json:
1. Run the n8n instance
2. Export the node definitions to evaluations/nodes.json
3. This file contains all available n8n node type definitions needed for validation

Without nodes.json, the evaluator cannot validate node types and parameters.
`;
		console.error(errorMessage);
		throw new Error('nodes.json file not found. See console output for details.');
	}

	const nodesData = readFileSync(nodesPath, 'utf-8');
	const allNodes = jsonParse<NodeWithVersion[]>(nodesData);

	console.log(`Total nodes loaded: ${allNodes.length}`);

	// Group nodes by name
	const nodesByName = new Map<string, NodeWithVersion[]>();

	for (const node of allNodes) {
		const existing = nodesByName.get(node.name) ?? [];
		existing.push(node);
		nodesByName.set(node.name, existing);
	}

	console.log(`Unique node types: ${nodesByName.size}`);

	// Extract latest version for each node
	const latestNodes: INodeTypeDescription[] = [];
	let multiVersionCount = 0;

	for (const [_nodeName, versions] of nodesByName.entries()) {
		if (versions.length > 1) {
			multiVersionCount++;
			// Find the node with the default version
			let selectedNode: NodeWithVersion | undefined;

			for (const node of versions) {
				// Select the node that matches the default version
				if (node.defaultVersion !== undefined) {
					if (Array.isArray(node.version)) {
						// For array versions, check if it includes the default version
						if (node.version.includes(node.defaultVersion)) {
							selectedNode = node;
						}
					} else if (node.version === node.defaultVersion) {
						selectedNode = node;
					}
				}
			}

			// If we found a matching node, use it; otherwise use the first one
			if (selectedNode) {
				latestNodes.push(selectedNode);
			} else {
				latestNodes.push(versions[0]);
			}
		} else {
			// Single version node
			latestNodes.push(versions[0]);
		}
	}

	console.log(`\nNodes with multiple versions: ${multiVersionCount}`);
	console.log(`Final node count: ${latestNodes.length}`);

	// Filter out hidden nodes
	const visibleNodes = latestNodes.filter((node) => !node.hidden);
	console.log(`Visible nodes (after filtering hidden): ${visibleNodes.length}\n`);

	return visibleNodes;
}

// Helper function to get specific node version for testing
export function getNodeVersion(nodes: INodeTypeDescription[], nodeName: string): string {
	const node = nodes.find((n) => n.name === nodeName);
	if (!node) return 'not found';

	const version = (node as NodeWithVersion).version;
	if (Array.isArray(version)) {
		return `[${version.join(', ')}]`;
	}
	return version?.toString() || 'unknown';
}
@@ -0,0 +1,184 @@
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
|
||||
import { MemorySaver } from '@langchain/langgraph';
|
||||
import { Logger } from '@n8n/backend-common';
|
||||
import { Service } from '@n8n/di';
|
||||
import { AiAssistantClient } from '@n8n_io/ai-assistant-sdk';
|
||||
import { Client } from 'langsmith';
|
||||
import { INodeTypes } from 'n8n-workflow';
|
||||
import type { IUser, INodeTypeDescription } from 'n8n-workflow';
|
||||
|
||||
import { LLMServiceError } from './errors';
|
||||
import { anthropicClaudeSonnet4, gpt41mini } from './llm-config';
|
||||
import { WorkflowBuilderAgent, type ChatPayload } from './workflow-builder-agent';
|
||||
|
||||
@Service()
|
||||
export class AiWorkflowBuilderService {
|
||||
private parsedNodeTypes: INodeTypeDescription[] = [];
|
||||
|
||||
private llmSimpleTask: BaseChatModel | undefined;
|
||||
|
||||
private llmComplexTask: BaseChatModel | undefined;
|
||||
|
||||
private tracingClient: Client | undefined;
|
||||
|
||||
private checkpointer = new MemorySaver();
|
||||
|
||||
private agent: WorkflowBuilderAgent | undefined;
|
||||
|
||||
constructor(
|
||||
private readonly nodeTypes: INodeTypes,
|
||||
private readonly client?: AiAssistantClient,
|
||||
private readonly logger?: Logger,
|
||||
private readonly instanceUrl?: string,
|
||||
) {
|
||||
this.parsedNodeTypes = this.getNodeTypes();
|
||||
}
|
||||
|
||||
private async setupModels(user?: IUser) {
|
||||
try {
|
||||
if (this.llmSimpleTask && this.llmComplexTask) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If client is provided, use it for API proxy
|
||||
if (this.client && user) {
|
||||
const authHeaders = await this.client.generateApiProxyCredentials(user);
|
||||
// Extract baseUrl from client configuration
|
||||
const baseUrl = this.client.getApiProxyBaseUrl();
|
||||
|
||||
this.llmSimpleTask = await gpt41mini({
|
||||
baseUrl: baseUrl + '/openai',
|
||||
// When using api-proxy the key will be populated automatically, we just need to pass a placeholder
|
||||
apiKey: '-',
|
||||
headers: {
|
||||
Authorization: authHeaders.apiKey,
|
||||
},
|
||||
});
|
||||
this.llmComplexTask = await anthropicClaudeSonnet4({
|
||||
baseUrl: baseUrl + '/anthropic',
|
||||
apiKey: '-',
|
||||
headers: {
|
||||
Authorization: authHeaders.apiKey,
|
||||
'anthropic-beta': 'prompt-caching-2024-07-31',
|
||||
},
|
||||
});
|
||||
|
||||
this.tracingClient = new Client({
|
||||
apiKey: '-',
|
||||
apiUrl: baseUrl + '/langsmith',
|
||||
autoBatchTracing: false,
|
||||
traceBatchConcurrency: 1,
|
||||
fetchOptions: {
|
||||
headers: {
|
||||
Authorization: authHeaders.apiKey,
|
||||
},
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
// If base URL is not set, use environment variables
|
||||
this.llmSimpleTask = await gpt41mini({
|
||||
apiKey: process.env.N8N_AI_OPENAI_API_KEY ?? '',
|
||||
});
|
||||
|
||||
this.llmComplexTask = await anthropicClaudeSonnet4({
|
||||
apiKey: process.env.N8N_AI_ANTHROPIC_KEY ?? '',
|
||||
headers: {
|
||||
'anthropic-beta': 'prompt-caching-2024-07-31',
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const llmError = new LLMServiceError('Failed to connect to LLM Provider', {
|
||||
cause: error,
|
||||
tags: {
|
||||
hasClient: !!this.client,
|
||||
hasUser: !!user,
|
||||
},
|
||||
});
|
||||
throw llmError;
|
||||
}
|
||||
}
|
||||
|
||||
private getNodeTypes(): INodeTypeDescription[] {
|
||||
// These types are ignored because they tend to cause issues when generating workflows
|
||||
const ignoredTypes = [
|
||||
'@n8n/n8n-nodes-langchain.toolVectorStore',
|
||||
'@n8n/n8n-nodes-langchain.documentGithubLoader',
|
||||
'@n8n/n8n-nodes-langchain.code',
|
||||
];
|
||||
const nodeTypesKeys = Object.keys(this.nodeTypes.getKnownTypes());
|
||||
|
||||
const nodeTypes = nodeTypesKeys
|
||||
.filter((nodeType) => !ignoredTypes.includes(nodeType))
|
||||
.map((nodeName) => {
|
||||
try {
|
||||
return { ...this.nodeTypes.getByNameAndVersion(nodeName).description, name: nodeName };
|
||||
} catch (error) {
|
||||
this.logger?.error('Error getting node type', {
|
||||
nodeName,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
return undefined;
|
||||
}
|
||||
})
|
||||
.filter(
|
||||
(nodeType): nodeType is INodeTypeDescription =>
|
||||
nodeType !== undefined && nodeType.hidden !== true,
|
||||
)
|
||||
.map((nodeType, _index, nodeTypes: INodeTypeDescription[]) => {
|
||||
// If the node type is a tool, we need to find the corresponding non-tool node type
|
||||
// and merge the two node types to get the full node type description.
|
||||
const isTool = nodeType.name.endsWith('Tool');
|
||||
if (!isTool) return nodeType;
|
||||
|
||||
const nonToolNode = nodeTypes.find((nt) => nt.name === nodeType.name.replace('Tool', ''));
|
||||
if (!nonToolNode) return nodeType;
|
||||
|
||||
return {
|
||||
...nonToolNode,
|
||||
...nodeType,
|
||||
};
|
||||
});
|
||||
|
||||
return nodeTypes;
|
||||
}
|
||||
|
||||
private async getAgent(user?: IUser) {
|
||||
if (!this.llmComplexTask || !this.llmSimpleTask) {
|
||||
await this.setupModels(user);
|
||||
}
|
||||
|
||||
if (!this.llmComplexTask || !this.llmSimpleTask) {
|
||||
throw new LLMServiceError('Failed to initialize LLM models');
|
||||
}
|
||||
|
||||
this.agent ??= new WorkflowBuilderAgent({
|
||||
parsedNodeTypes: this.parsedNodeTypes,
|
||||
// We use Sonnet both for simple and complex tasks
|
||||
llmSimpleTask: this.llmComplexTask,
|
||||
llmComplexTask: this.llmComplexTask,
|
||||
logger: this.logger,
|
||||
checkpointer: this.checkpointer,
|
||||
tracer: this.tracingClient
|
||||
? new LangChainTracer({ client: this.tracingClient, projectName: 'n8n-workflow-builder' })
|
||||
: undefined,
|
||||
instanceUrl: this.instanceUrl,
|
||||
});
|
||||
|
||||
return this.agent;
|
||||
}
|
||||
|
||||
async *chat(payload: ChatPayload, user?: IUser, abortSignal?: AbortSignal) {
|
||||
const agent = await this.getAgent(user);
|
||||
|
||||
for await (const output of agent.chat(payload, user?.id?.toString(), abortSignal)) {
|
||||
yield output;
|
||||
}
|
||||
}
|
||||
|
||||
async getSessions(workflowId: string | undefined, user?: IUser) {
|
||||
const agent = await this.getAgent(user);
|
||||
return await agent.getSessions(workflowId, user?.id?.toString());
|
||||
}
|
||||
}
|
||||
3
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/constants 2.ts
Executable file
3
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/constants 2.ts
Executable file
@@ -0,0 +1,3 @@
|
||||
export const MAX_AI_BUILDER_PROMPT_LENGTH = 1000; // characters
|
||||
|
||||
export const DEFAULT_AUTO_COMPACT_THRESHOLD_TOKENS = 20_000; // Tokens threshold for auto-compacting the conversation
|
||||
3
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/index 2.ts
Executable file
3
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/index 2.ts
Executable file
@@ -0,0 +1,3 @@
|
||||
export * from './ai-workflow-builder-agent.service';
|
||||
export * from './types';
|
||||
export * from './workflow-state';
|
||||
60
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/llm-config 2.ts
Executable file
60
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/llm-config 2.ts
Executable file
@@ -0,0 +1,60 @@
|
||||
// Different LLMConfig type for this file - specific to LLM providers
|
||||
interface LLMProviderConfig {
|
||||
apiKey: string;
|
||||
baseUrl?: string;
|
||||
headers?: Record<string, string>;
|
||||
}
|
||||
|
||||
export const o4mini = async (config: LLMProviderConfig) => {
|
||||
const { ChatOpenAI } = await import('@langchain/openai');
|
||||
return new ChatOpenAI({
|
||||
model: 'o4-mini-2025-04-16',
|
||||
apiKey: config.apiKey,
|
||||
configuration: {
|
||||
baseURL: config.baseUrl,
|
||||
defaultHeaders: config.headers,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const gpt41mini = async (config: LLMProviderConfig) => {
|
||||
const { ChatOpenAI } = await import('@langchain/openai');
|
||||
return new ChatOpenAI({
|
||||
model: 'gpt-4.1-mini-2025-04-14',
|
||||
apiKey: config.apiKey,
|
||||
temperature: 0,
|
||||
maxTokens: -1,
|
||||
configuration: {
|
||||
baseURL: config.baseUrl,
|
||||
defaultHeaders: config.headers,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const gpt41 = async (config: LLMProviderConfig) => {
|
||||
const { ChatOpenAI } = await import('@langchain/openai');
|
||||
return new ChatOpenAI({
|
||||
model: 'gpt-4.1-2025-04-14',
|
||||
apiKey: config.apiKey,
|
||||
temperature: 0.3,
|
||||
maxTokens: -1,
|
||||
configuration: {
|
||||
baseURL: config.baseUrl,
|
||||
defaultHeaders: config.headers,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const anthropicClaudeSonnet4 = async (config: LLMProviderConfig) => {
|
||||
const { ChatAnthropic } = await import('@langchain/anthropic');
|
||||
return new ChatAnthropic({
|
||||
model: 'claude-sonnet-4-20250514',
|
||||
apiKey: config.apiKey,
|
||||
temperature: 0,
|
||||
maxTokens: 16000,
|
||||
anthropicApiUrl: config.baseUrl,
|
||||
clientOptions: {
|
||||
defaultHeaders: config.headers,
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -0,0 +1,500 @@
|
||||
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import type { ToolMessage } from '@langchain/core/messages';
|
||||
import { AIMessage, HumanMessage, RemoveMessage } from '@langchain/core/messages';
|
||||
import type { RunnableConfig } from '@langchain/core/runnables';
|
||||
import type { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
|
||||
import { StateGraph, MemorySaver, END, GraphRecursionError } from '@langchain/langgraph';
|
||||
import type { Logger } from '@n8n/backend-common';
|
||||
import {
|
||||
ApplicationError,
|
||||
type INodeTypeDescription,
|
||||
type IRunExecutionData,
|
||||
type IWorkflowBase,
|
||||
type NodeExecutionSchema,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { workflowNameChain } from '@/chains/workflow-name';
|
||||
import { DEFAULT_AUTO_COMPACT_THRESHOLD_TOKENS, MAX_AI_BUILDER_PROMPT_LENGTH } from '@/constants';
|
||||
|
||||
import { conversationCompactChain } from './chains/conversation-compact';
|
||||
import { LLMServiceError, ValidationError } from './errors';
|
||||
import { createAddNodeTool } from './tools/add-node.tool';
|
||||
import { createConnectNodesTool } from './tools/connect-nodes.tool';
|
||||
import { createNodeDetailsTool } from './tools/node-details.tool';
|
||||
import { createNodeSearchTool } from './tools/node-search.tool';
|
||||
import { mainAgentPrompt } from './tools/prompts/main-agent.prompt';
|
||||
import { createRemoveNodeTool } from './tools/remove-node.tool';
|
||||
import { createUpdateNodeParametersTool } from './tools/update-node-parameters.tool';
|
||||
import type { SimpleWorkflow } from './types/workflow';
|
||||
import { processOperations } from './utils/operations-processor';
|
||||
import { createStreamProcessor, formatMessages } from './utils/stream-processor';
|
||||
import { extractLastTokenUsage } from './utils/token-usage';
|
||||
import { executeToolsInParallel } from './utils/tool-executor';
|
||||
import { WorkflowState } from './workflow-state';
|
||||
|
||||
export interface WorkflowBuilderAgentConfig {
|
||||
parsedNodeTypes: INodeTypeDescription[];
|
||||
llmSimpleTask: BaseChatModel;
|
||||
llmComplexTask: BaseChatModel;
|
||||
logger?: Logger;
|
||||
checkpointer?: MemorySaver;
|
||||
tracer?: LangChainTracer;
|
||||
autoCompactThresholdTokens?: number;
|
||||
instanceUrl?: string;
|
||||
}
|
||||
|
||||
export interface ChatPayload {
|
||||
message: string;
|
||||
workflowContext?: {
|
||||
executionSchema?: NodeExecutionSchema[];
|
||||
currentWorkflow?: Partial<IWorkflowBase>;
|
||||
executionData?: IRunExecutionData['resultData'];
|
||||
};
|
||||
}
|
||||
|
||||
export class WorkflowBuilderAgent {
|
||||
private checkpointer: MemorySaver;
|
||||
private parsedNodeTypes: INodeTypeDescription[];
|
||||
private llmSimpleTask: BaseChatModel;
|
||||
private llmComplexTask: BaseChatModel;
|
||||
private logger?: Logger;
|
||||
private tracer?: LangChainTracer;
|
||||
private autoCompactThresholdTokens: number;
|
||||
private instanceUrl?: string;
|
||||
|
||||
constructor(config: WorkflowBuilderAgentConfig) {
|
||||
this.parsedNodeTypes = config.parsedNodeTypes;
|
||||
this.llmSimpleTask = config.llmSimpleTask;
|
||||
this.llmComplexTask = config.llmComplexTask;
|
||||
this.logger = config.logger;
|
||||
this.checkpointer = config.checkpointer ?? new MemorySaver();
|
||||
this.tracer = config.tracer;
|
||||
this.autoCompactThresholdTokens =
|
||||
config.autoCompactThresholdTokens ?? DEFAULT_AUTO_COMPACT_THRESHOLD_TOKENS;
|
||||
this.instanceUrl = config.instanceUrl;
|
||||
}
|
||||
|
||||
private createWorkflow() {
|
||||
const tools = [
|
||||
createNodeSearchTool(this.parsedNodeTypes),
|
||||
createNodeDetailsTool(this.parsedNodeTypes),
|
||||
createAddNodeTool(this.parsedNodeTypes),
|
||||
createConnectNodesTool(this.parsedNodeTypes, this.logger),
|
||||
createRemoveNodeTool(this.logger),
|
||||
createUpdateNodeParametersTool(
|
||||
this.parsedNodeTypes,
|
||||
this.llmComplexTask,
|
||||
this.logger,
|
||||
this.instanceUrl,
|
||||
),
|
||||
];
|
||||
|
||||
// Create a map for quick tool lookup
|
||||
const toolMap = new Map(tools.map((tool) => [tool.name, tool]));
|
||||
|
||||
const callModel = async (state: typeof WorkflowState.State) => {
|
||||
if (!this.llmSimpleTask) {
|
||||
throw new LLMServiceError('LLM not setup');
|
||||
}
|
||||
if (typeof this.llmSimpleTask.bindTools !== 'function') {
|
||||
throw new LLMServiceError('LLM does not support tools', {
|
||||
llmModel: this.llmSimpleTask._llmType(),
|
||||
});
|
||||
}
|
||||
|
||||
const prompt = await mainAgentPrompt.invoke({
|
||||
...state,
|
||||
executionData: state.workflowContext?.executionData ?? {},
|
||||
executionSchema: state.workflowContext?.executionSchema ?? [],
|
||||
instanceUrl: this.instanceUrl,
|
||||
});
|
||||
const response = await this.llmSimpleTask.bindTools(tools).invoke(prompt);
|
||||
|
||||
return { messages: [response] };
|
||||
};
|
||||
|
||||
const shouldAutoCompact = ({ messages }: typeof WorkflowState.State) => {
|
||||
const tokenUsage = extractLastTokenUsage(messages);
|
||||
|
||||
if (!tokenUsage) {
|
||||
this.logger?.debug('No token usage metadata found');
|
||||
return false;
|
||||
}
|
||||
|
||||
const tokensUsed = tokenUsage.input_tokens + tokenUsage.output_tokens;
|
||||
|
||||
this.logger?.debug('Token usage', {
|
||||
inputTokens: tokenUsage.input_tokens,
|
||||
outputTokens: tokenUsage.output_tokens,
|
||||
totalTokens: tokensUsed,
|
||||
});
|
||||
|
||||
return tokensUsed > this.autoCompactThresholdTokens;
|
||||
};
|
||||
|
||||
const shouldModifyState = (state: typeof WorkflowState.State) => {
|
||||
const { messages, workflowContext } = state;
|
||||
const lastHumanMessage = messages.findLast((m) => m instanceof HumanMessage)!; // There always should be at least one human message in the array
|
||||
|
||||
if (lastHumanMessage.content === '/compact') {
|
||||
return 'compact_messages';
|
||||
}
|
||||
|
||||
if (lastHumanMessage.content === '/clear') {
|
||||
return 'delete_messages';
|
||||
}
|
||||
|
||||
// If the workflow is empty (no nodes),
|
||||
// we consider it initial generation request and auto-generate a name for the workflow.
|
||||
if (workflowContext?.currentWorkflow?.nodes?.length === 0 && messages.length === 1) {
|
||||
return 'create_workflow_name';
|
||||
}
|
||||
|
||||
if (shouldAutoCompact(state)) {
|
||||
return 'auto_compact_messages';
|
||||
}
|
||||
|
||||
return 'agent';
|
||||
};
|
||||
|
||||
const shouldContinue = ({ messages }: typeof WorkflowState.State) => {
|
||||
const lastMessage: AIMessage = messages[messages.length - 1];
|
||||
|
||||
if (lastMessage.tool_calls?.length) {
|
||||
return 'tools';
|
||||
}
|
||||
return END;
|
||||
};
|
||||
|
||||
const customToolExecutor = async (state: typeof WorkflowState.State) => {
|
||||
return await executeToolsInParallel({ state, toolMap });
|
||||
};
|
||||
|
||||
function deleteMessages(state: typeof WorkflowState.State) {
|
||||
const messages = state.messages;
|
||||
const stateUpdate: Partial<typeof WorkflowState.State> = {
|
||||
workflowOperations: null,
|
||||
workflowContext: {},
|
||||
messages: messages.map((m) => new RemoveMessage({ id: m.id! })) ?? [],
|
||||
workflowJSON: {
|
||||
nodes: [],
|
||||
connections: {},
|
||||
name: '',
|
||||
},
|
||||
};
|
||||
|
||||
return stateUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compacts the conversation history by summarizing it
|
||||
* and removing original messages.
|
||||
* Might be triggered manually by the user with `/compact` message, or run automatically
|
||||
* when the conversation history exceeds a certain token limit.
|
||||
*/
|
||||
const compactSession = async (state: typeof WorkflowState.State) => {
|
||||
if (!this.llmSimpleTask) {
|
||||
throw new LLMServiceError('LLM not setup');
|
||||
}
|
||||
|
||||
const { messages, previousSummary } = state;
|
||||
const lastHumanMessage = messages[messages.length - 1] satisfies HumanMessage;
|
||||
const isAutoCompact = lastHumanMessage.content !== '/compact';
|
||||
|
||||
this.logger?.debug('Compacting conversation history', {
|
||||
isAutoCompact,
|
||||
});
|
||||
|
||||
const compactedMessages = await conversationCompactChain(
|
||||
this.llmSimpleTask,
|
||||
messages,
|
||||
previousSummary,
|
||||
);
|
||||
|
||||
// The summarized conversation history will become a part of system prompt
|
||||
// and will be used in the next LLM call.
|
||||
// We will remove all messages and replace them with a mock HumanMessage and AIMessage
|
||||
// to indicate that the conversation history has been compacted.
|
||||
// If this is an auto-compact, we will also keep the last human message, as it will continue executing the workflow.
|
||||
return {
|
||||
previousSummary: compactedMessages.summaryPlain,
|
||||
messages: [
|
||||
...messages.map((m) => new RemoveMessage({ id: m.id! })),
|
||||
new HumanMessage('Please compress the conversation history'),
|
||||
new AIMessage('Successfully compacted conversation history'),
|
||||
...(isAutoCompact ? [new HumanMessage({ content: lastHumanMessage.content })] : []),
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a workflow name based on the initial user message.
|
||||
*/
|
||||
const createWorkflowName = async (state: typeof WorkflowState.State) => {
|
||||
if (!this.llmSimpleTask) {
|
||||
throw new LLMServiceError('LLM not setup');
|
||||
}
|
||||
|
||||
const { workflowJSON, messages } = state;
|
||||
|
||||
if (messages.length === 1 && messages[0] instanceof HumanMessage) {
|
||||
const initialMessage = messages[0] satisfies HumanMessage;
|
||||
|
||||
if (typeof initialMessage.content !== 'string') {
|
||||
this.logger?.debug(
|
||||
'Initial message content is not a string, skipping workflow name generation',
|
||||
);
|
||||
return {};
|
||||
}
|
||||
|
||||
this.logger?.debug('Generating workflow name');
|
||||
const { name } = await workflowNameChain(this.llmSimpleTask, initialMessage.content);
|
||||
|
||||
return {
|
||||
workflowJSON: {
|
||||
...workflowJSON,
|
||||
name,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
const workflow = new StateGraph(WorkflowState)
|
||||
.addNode('agent', callModel)
|
||||
.addNode('tools', customToolExecutor)
|
||||
.addNode('process_operations', processOperations)
|
||||
.addNode('delete_messages', deleteMessages)
|
||||
.addNode('compact_messages', compactSession)
|
||||
.addNode('auto_compact_messages', compactSession)
|
||||
.addNode('create_workflow_name', createWorkflowName)
|
||||
.addConditionalEdges('__start__', shouldModifyState)
|
||||
.addEdge('tools', 'process_operations')
|
||||
.addEdge('process_operations', 'agent')
|
||||
.addEdge('auto_compact_messages', 'agent')
|
||||
.addEdge('create_workflow_name', 'agent')
|
||||
.addEdge('delete_messages', END)
|
||||
.addEdge('compact_messages', END)
|
||||
.addConditionalEdges('agent', shouldContinue);
|
||||
|
||||
return workflow;
|
||||
}
|
||||
|
||||
async getState(workflowId: string, userId?: string) {
|
||||
const workflow = this.createWorkflow();
|
||||
const agent = workflow.compile({ checkpointer: this.checkpointer });
|
||||
return await agent.getState({
|
||||
configurable: { thread_id: `workflow-${workflowId}-user-${userId ?? new Date().getTime()}` },
|
||||
});
|
||||
}
|
||||
|
||||
static generateThreadId(workflowId?: string, userId?: string) {
|
||||
return workflowId
|
||||
? `workflow-${workflowId}-user-${userId ?? new Date().getTime()}`
|
||||
: crypto.randomUUID();
|
||||
}
|
||||
|
||||
private getDefaultWorkflowJSON(payload: ChatPayload): SimpleWorkflow {
|
||||
return (
|
||||
(payload.workflowContext?.currentWorkflow as SimpleWorkflow) ?? {
|
||||
nodes: [],
|
||||
connections: {},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async *chat(payload: ChatPayload, userId?: string, abortSignal?: AbortSignal) {
|
||||
this.validateMessageLength(payload.message);
|
||||
|
||||
const { agent, threadConfig, streamConfig } = this.setupAgentAndConfigs(
|
||||
payload,
|
||||
userId,
|
||||
abortSignal,
|
||||
);
|
||||
|
||||
try {
|
||||
const stream = await this.createAgentStream(payload, streamConfig, agent);
|
||||
yield* this.processAgentStream(stream, agent, threadConfig);
|
||||
} catch (error: unknown) {
|
||||
this.handleStreamError(error);
|
||||
}
|
||||
}
|
||||
|
||||
private validateMessageLength(message: string): void {
|
||||
if (message.length > MAX_AI_BUILDER_PROMPT_LENGTH) {
|
||||
this.logger?.warn('Message exceeds maximum length', {
|
||||
messageLength: message.length,
|
||||
maxLength: MAX_AI_BUILDER_PROMPT_LENGTH,
|
||||
});
|
||||
|
||||
throw new ValidationError(
|
||||
`Message exceeds maximum length of ${MAX_AI_BUILDER_PROMPT_LENGTH} characters`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private setupAgentAndConfigs(payload: ChatPayload, userId?: string, abortSignal?: AbortSignal) {
|
||||
const agent = this.createWorkflow().compile({ checkpointer: this.checkpointer });
|
||||
const workflowId = payload.workflowContext?.currentWorkflow?.id;
|
||||
// Generate thread ID from workflowId and userId
|
||||
// This ensures one session per workflow per user
|
||||
const threadId = WorkflowBuilderAgent.generateThreadId(workflowId, userId);
|
||||
const threadConfig: RunnableConfig = {
|
||||
configurable: {
|
||||
thread_id: threadId,
|
||||
},
|
||||
};
|
||||
const streamConfig = {
|
||||
...threadConfig,
|
||||
streamMode: ['updates', 'custom'],
|
||||
recursionLimit: 50,
|
||||
signal: abortSignal,
|
||||
callbacks: this.tracer ? [this.tracer] : undefined,
|
||||
};
|
||||
|
||||
return { agent, threadConfig, streamConfig };
|
||||
}
|
||||
|
||||
private async createAgentStream(
|
||||
payload: ChatPayload,
|
||||
streamConfig: RunnableConfig,
|
||||
agent: ReturnType<ReturnType<typeof this.createWorkflow>['compile']>,
|
||||
) {
|
||||
return await agent.stream(
|
||||
{
|
||||
messages: [new HumanMessage({ content: payload.message })],
|
||||
workflowJSON: this.getDefaultWorkflowJSON(payload),
|
||||
workflowOperations: [],
|
||||
workflowContext: payload.workflowContext,
|
||||
},
|
||||
streamConfig,
|
||||
);
|
||||
}
|
||||
|
||||
private handleStreamError(error: unknown): never {
|
||||
const invalidRequestErrorMessage = this.getInvalidRequestError(error);
|
||||
if (invalidRequestErrorMessage) {
|
||||
throw new ValidationError(invalidRequestErrorMessage);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
private async *processAgentStream(
|
||||
stream: AsyncGenerator<[string, unknown], void, unknown>,
|
||||
agent: ReturnType<ReturnType<typeof this.createWorkflow>['compile']>,
|
||||
threadConfig: RunnableConfig,
|
||||
) {
|
||||
try {
|
||||
const streamProcessor = createStreamProcessor(stream);
|
||||
for await (const output of streamProcessor) {
|
||||
yield output;
|
||||
}
|
||||
} catch (error) {
|
||||
await this.handleAgentStreamError(error, agent, threadConfig);
|
||||
}
|
||||
}
|
||||
|
||||
private async handleAgentStreamError(
|
||||
error: unknown,
|
||||
agent: ReturnType<ReturnType<typeof this.createWorkflow>['compile']>,
|
||||
threadConfig: RunnableConfig,
|
||||
): Promise<void> {
|
||||
if (
|
||||
error &&
|
||||
typeof error === 'object' &&
|
||||
'message' in error &&
|
||||
typeof error.message === 'string' &&
|
||||
// This is naive, but it's all we get from LangGraph AbortError
|
||||
['Abort', 'Aborted'].includes(error.message)
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
const messages = (await agent.getState(threadConfig)).values.messages as Array<
|
||||
AIMessage | HumanMessage | ToolMessage
|
||||
>;
|
||||
|
||||
// Handle abort errors gracefully
|
||||
const abortedAiMessage = new AIMessage({
|
||||
content: '[Task aborted]',
|
||||
id: crypto.randomUUID(),
|
||||
});
|
||||
// TODO: Should we clear tool calls that are in progress?
|
||||
await agent.updateState(threadConfig, { messages: [...messages, abortedAiMessage] });
|
||||
return;
|
||||
}
|
||||
|
||||
// If it's not an abort error, check for GraphRecursionError
|
||||
if (error instanceof GraphRecursionError) {
|
||||
throw new ApplicationError(
|
||||
'Workflow generation stopped: The AI reached the maximum number of steps while building your workflow. This usually means the workflow design became too complex or got stuck in a loop while trying to create the nodes and connections.',
|
||||
);
|
||||
}
|
||||
|
||||
// Re-throw any other errors
|
||||
throw error;
|
||||
}
|
||||
|
||||
private getInvalidRequestError(error: unknown): string | undefined {
|
||||
if (
|
||||
error instanceof Error &&
|
||||
'error' in error &&
|
||||
typeof error.error === 'object' &&
|
||||
error.error
|
||||
) {
|
||||
const innerError = error.error;
|
||||
if ('error' in innerError && typeof innerError.error === 'object' && innerError.error) {
|
||||
const errorDetails = innerError.error;
|
||||
if (
|
||||
'type' in errorDetails &&
|
||||
errorDetails.type === 'invalid_request_error' &&
|
||||
'message' in errorDetails &&
|
||||
typeof errorDetails.message === 'string'
|
||||
) {
|
||||
return errorDetails.message;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async getSessions(workflowId: string | undefined, userId?: string) {
|
||||
// For now, we'll return the current session if we have a workflowId
|
||||
// MemorySaver doesn't expose a way to list all threads, so we'll need to
|
||||
// track this differently if we want to list all sessions
|
||||
const sessions = [];
|
||||
|
||||
if (workflowId) {
|
||||
const threadId = WorkflowBuilderAgent.generateThreadId(workflowId, userId);
|
||||
const threadConfig: RunnableConfig = {
|
||||
configurable: {
|
||||
thread_id: threadId,
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
// Try to get the checkpoint for this thread
|
||||
const checkpoint = await this.checkpointer.getTuple(threadConfig);
|
||||
|
||||
if (checkpoint?.checkpoint) {
|
||||
const messages =
|
||||
(checkpoint.checkpoint.channel_values?.messages as Array<
|
||||
AIMessage | HumanMessage | ToolMessage
|
||||
>) ?? [];
|
||||
|
||||
sessions.push({
|
||||
sessionId: threadId,
|
||||
messages: formatMessages(messages),
|
||||
lastUpdated: checkpoint.checkpoint.ts,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// Thread doesn't exist yet
|
||||
this.logger?.debug('No session found for workflow:', { workflowId, error });
|
||||
}
|
||||
}
|
||||
|
||||
return { sessions };
|
||||
}
|
||||
}
|
||||
89
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/workflow-state 2.ts
Executable file
89
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/src/workflow-state 2.ts
Executable file
@@ -0,0 +1,89 @@
|
||||
import type { BaseMessage } from '@langchain/core/messages';
|
||||
import { HumanMessage } from '@langchain/core/messages';
|
||||
import { Annotation, messagesStateReducer } from '@langchain/langgraph';
|
||||
|
||||
import type { SimpleWorkflow, WorkflowOperation } from './types/workflow';
|
||||
import type { ChatPayload } from './workflow-builder-agent';
|
||||
|
||||
/**
|
||||
* Reducer for collecting workflow operations from parallel tool executions.
|
||||
* This reducer intelligently merges operations, avoiding duplicates and handling special cases.
|
||||
*/
|
||||
function operationsReducer(
|
||||
current: WorkflowOperation[] | null,
|
||||
update: WorkflowOperation[] | null | undefined,
|
||||
): WorkflowOperation[] {
|
||||
if (update === null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (!update || update.length === 0) {
|
||||
return current ?? [];
|
||||
}
|
||||
|
||||
// For clear operations, we can reset everything
|
||||
if (update.some((op) => op.type === 'clear')) {
|
||||
return update.filter((op) => op.type === 'clear').slice(-1); // Keep only the last clear
|
||||
}
|
||||
|
||||
if (!current && !update) {
|
||||
return [];
|
||||
}
|
||||
// Otherwise, append new operations
|
||||
return [...(current ?? []), ...update];
|
||||
}
|
||||
|
||||
// Creates a reducer that trims the message history to keep only the last `maxUserMessages` HumanMessage instances
|
||||
export function createTrimMessagesReducer(maxUserMessages: number) {
|
||||
return (current: BaseMessage[]): BaseMessage[] => {
|
||||
// Count HumanMessage instances and remember their indices
|
||||
const humanMessageIndices: number[] = [];
|
||||
current.forEach((msg, index) => {
|
||||
if (msg instanceof HumanMessage) {
|
||||
humanMessageIndices.push(index);
|
||||
}
|
||||
});
|
||||
|
||||
// If we have fewer than or equal to maxUserMessages, return as is
|
||||
if (humanMessageIndices.length <= maxUserMessages) {
|
||||
return current;
|
||||
}
|
||||
|
||||
// Find the index of the first HumanMessage that we want to keep
|
||||
const startHumanMessageIndex =
|
||||
humanMessageIndices[humanMessageIndices.length - maxUserMessages];
|
||||
|
||||
// Slice from that HumanMessage onwards
|
||||
return current.slice(startHumanMessageIndex);
|
||||
};
|
||||
}
|
||||
|
||||
export const WorkflowState = Annotation.Root({
|
||||
messages: Annotation<BaseMessage[]>({
|
||||
reducer: messagesStateReducer,
|
||||
default: () => [],
|
||||
}),
|
||||
// // The original prompt from the user.
|
||||
// The JSON representation of the workflow being built.
|
||||
// Now a simple field without custom reducer - all updates go through operations
|
||||
workflowJSON: Annotation<SimpleWorkflow>({
|
||||
reducer: (x, y) => y ?? x,
|
||||
default: () => ({ nodes: [], connections: {}, name: '' }),
|
||||
}),
|
||||
// Operations to apply to the workflow - processed by a separate node
|
||||
workflowOperations: Annotation<WorkflowOperation[] | null>({
|
||||
reducer: operationsReducer,
|
||||
default: () => [],
|
||||
}),
|
||||
// Whether the user prompt is a workflow prompt.
|
||||
// Latest workflow context
|
||||
workflowContext: Annotation<ChatPayload['workflowContext'] | undefined>({
|
||||
reducer: (x, y) => y ?? x,
|
||||
}),
|
||||
|
||||
// Previous conversation summary (used for compressing long conversations)
|
||||
previousSummary: Annotation<string>({
|
||||
reducer: (x, y) => y ?? x, // Overwrite with the latest summary
|
||||
default: () => 'EMPTY',
|
||||
}),
|
||||
});
|
||||
599
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/test/test-utils 2.ts
Executable file
599
n8n-n8n-1.109.2/packages/@n8n/ai-workflow-builder.ee/test/test-utils 2.ts
Executable file
@@ -0,0 +1,599 @@
|
||||
import type { ToolRunnableConfig } from '@langchain/core/tools';
|
||||
import type { LangGraphRunnableConfig } from '@langchain/langgraph';
|
||||
import { getCurrentTaskInput } from '@langchain/langgraph';
|
||||
import type { MockProxy } from 'jest-mock-extended';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import type {
|
||||
INode,
|
||||
INodeTypeDescription,
|
||||
INodeParameters,
|
||||
IConnection,
|
||||
NodeConnectionType,
|
||||
} from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
|
||||
import type { ProgressReporter, ToolProgressMessage } from '../src/types/tools';
|
||||
import type { SimpleWorkflow } from '../src/types/workflow';
|
||||
|
||||
export const mockProgress = (): MockProxy<ProgressReporter> => mock<ProgressReporter>();
|
||||
|
||||
// Mock state helpers
|
||||
export const mockStateHelpers = () => ({
|
||||
getNodes: jest.fn(() => [] as INode[]),
|
||||
getConnections: jest.fn(() => ({}) as SimpleWorkflow['connections']),
|
||||
updateNode: jest.fn((_id: string, _updates: Partial<INode>) => undefined),
|
||||
addNodes: jest.fn((_nodes: INode[]) => undefined),
|
||||
removeNode: jest.fn((_id: string) => undefined),
|
||||
addConnections: jest.fn((_connections: IConnection[]) => undefined),
|
||||
removeConnection: jest.fn((_sourceId: string, _targetId: string, _type?: string) => undefined),
|
||||
});
|
||||
|
||||
export type MockStateHelpers = ReturnType<typeof mockStateHelpers>;
|
||||
|
||||
// Simple node creation helper
|
||||
export const createNode = (overrides: Partial<INode> = {}): INode => ({
|
||||
id: 'node1',
|
||||
name: 'TestNode',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
...overrides,
|
||||
// Ensure parameters are properly merged if provided in overrides
|
||||
parameters: overrides.parameters ?? {},
|
||||
});
|
||||
|
||||
// Simple workflow builder
|
||||
export const createWorkflow = (nodes: INode[] = []): SimpleWorkflow => {
|
||||
const workflow: SimpleWorkflow = { nodes, connections: {}, name: 'Test workflow' };
|
||||
return workflow;
|
||||
};
|
||||
|
||||
// Create mock node type description
|
||||
export const createNodeType = (
|
||||
overrides: Partial<INodeTypeDescription> = {},
|
||||
): INodeTypeDescription => ({
|
||||
displayName: overrides.displayName ?? 'Test Node',
|
||||
name: overrides.name ?? 'test.node',
|
||||
group: overrides.group ?? ['transform'],
|
||||
version: overrides.version ?? 1,
|
||||
description: overrides.description ?? 'Test node description',
|
||||
defaults: overrides.defaults ?? { name: 'Test Node' },
|
||||
inputs: overrides.inputs ?? ['main'],
|
||||
outputs: overrides.outputs ?? ['main'],
|
||||
properties: overrides.properties ?? [],
|
||||
...overrides,
|
||||
});
|
||||
|
||||
// Common node types for testing
|
||||
export const nodeTypes = {
|
||||
code: createNodeType({
|
||||
displayName: 'Code',
|
||||
name: 'n8n-nodes-base.code',
|
||||
group: ['transform'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'JavaScript',
|
||||
name: 'jsCode',
|
||||
type: 'string',
|
||||
typeOptions: {
|
||||
editor: 'codeNodeEditor',
|
||||
},
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
}),
|
||||
httpRequest: createNodeType({
|
||||
displayName: 'HTTP Request',
|
||||
name: 'n8n-nodes-base.httpRequest',
|
||||
group: ['input'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Method',
|
||||
name: 'method',
|
||||
type: 'options',
|
||||
options: [
|
||||
{ name: 'GET', value: 'GET' },
|
||||
{ name: 'POST', value: 'POST' },
|
||||
],
|
||||
default: 'GET',
|
||||
},
|
||||
],
|
||||
}),
|
||||
webhook: createNodeType({
|
||||
displayName: 'Webhook',
|
||||
name: 'n8n-nodes-base.webhook',
|
||||
group: ['trigger'],
|
||||
inputs: [],
|
||||
outputs: ['main'],
|
||||
webhooks: [
|
||||
{
|
||||
name: 'default',
|
||||
httpMethod: 'POST',
|
||||
responseMode: 'onReceived',
|
||||
path: 'webhook',
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Path',
|
||||
name: 'path',
|
||||
type: 'string',
|
||||
default: 'webhook',
|
||||
},
|
||||
],
|
||||
}),
|
||||
agent: createNodeType({
|
||||
displayName: 'AI Agent',
|
||||
name: '@n8n/n8n-nodes-langchain.agent',
|
||||
group: ['output'],
|
||||
inputs: ['ai_agent'],
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
}),
|
||||
openAiModel: createNodeType({
|
||||
displayName: 'OpenAI Chat Model',
|
||||
name: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
group: ['output'],
|
||||
inputs: [],
|
||||
outputs: ['ai_languageModel'],
|
||||
properties: [],
|
||||
}),
|
||||
setNode: createNodeType({
|
||||
displayName: 'Set',
|
||||
name: 'n8n-nodes-base.set',
|
||||
group: ['transform'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Values to Set',
|
||||
name: 'values',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
},
|
||||
],
|
||||
}),
|
||||
ifNode: createNodeType({
|
||||
displayName: 'If',
|
||||
name: 'n8n-nodes-base.if',
|
||||
group: ['transform'],
|
||||
inputs: ['main'],
|
||||
outputs: ['main', 'main'],
|
||||
outputNames: ['true', 'false'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Conditions',
|
||||
name: 'conditions',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
},
|
||||
],
|
||||
}),
|
||||
mergeNode: createNodeType({
|
||||
displayName: 'Merge',
|
||||
name: 'n8n-nodes-base.merge',
|
||||
group: ['transform'],
|
||||
inputs: ['main', 'main'],
|
||||
outputs: ['main'],
|
||||
inputNames: ['Input 1', 'Input 2'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Mode',
|
||||
name: 'mode',
|
||||
type: 'options',
|
||||
options: [
|
||||
{ name: 'Append', value: 'append' },
|
||||
{ name: 'Merge By Index', value: 'mergeByIndex' },
|
||||
{ name: 'Merge By Key', value: 'mergeByKey' },
|
||||
],
|
||||
default: 'append',
|
||||
},
|
||||
],
|
||||
}),
|
||||
vectorStoreNode: createNodeType({
|
||||
displayName: 'Vector Store',
|
||||
name: '@n8n/n8n-nodes-langchain.vectorStore',
|
||||
subtitle: '={{$parameter["mode"] === "retrieve" ? "Retrieve" : "Insert"}}',
|
||||
group: ['transform'],
|
||||
inputs: `={{ ((parameter) => {
|
||||
function getInputs(parameters) {
|
||||
const mode = parameters?.mode;
|
||||
const inputs = [];
|
||||
if (mode === 'retrieve-as-tool') {
|
||||
inputs.push({
|
||||
displayName: 'Embedding',
|
||||
type: 'ai_embedding',
|
||||
required: true
|
||||
});
|
||||
} else {
|
||||
inputs.push({
|
||||
displayName: '',
|
||||
type: 'main'
|
||||
});
|
||||
inputs.push({
|
||||
displayName: 'Embedding',
|
||||
type: 'ai_embedding',
|
||||
required: true
|
||||
});
|
||||
}
|
||||
return inputs;
|
||||
};
|
||||
return getInputs(parameter)
|
||||
})($parameter) }}`,
|
||||
outputs: `={{ ((parameter) => {
|
||||
function getOutputs(parameters) {
|
||||
const mode = parameters?.mode;
|
||||
if (mode === 'retrieve-as-tool') {
|
||||
return ['ai_tool'];
|
||||
} else if (mode === 'retrieve') {
|
||||
return ['ai_document'];
|
||||
} else {
|
||||
return ['main'];
|
||||
}
|
||||
};
|
||||
return getOutputs(parameter)
|
||||
})($parameter) }}`,
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Mode',
|
||||
name: 'mode',
|
||||
type: 'options',
|
||||
options: [
|
||||
{ name: 'Insert', value: 'insert' },
|
||||
{ name: 'Retrieve', value: 'retrieve' },
|
||||
{ name: 'Retrieve (As Tool)', value: 'retrieve-as-tool' },
|
||||
],
|
||||
default: 'insert',
|
||||
},
|
||||
// Many more properties would be here in reality
|
||||
],
|
||||
}),
|
||||
};
|
||||
|
||||
// Helper to create connections
|
||||
export const createConnection = (
|
||||
_fromId: string,
|
||||
toId: string,
|
||||
type: NodeConnectionType = 'main',
|
||||
index: number = 0,
|
||||
) => ({
|
||||
node: toId,
|
||||
type,
|
||||
index,
|
||||
});
|
||||
|
||||
// Generic chain interface
|
||||
interface Chain<TInput = Record<string, unknown>, TOutput = Record<string, unknown>> {
|
||||
invoke: (input: TInput) => Promise<TOutput>;
|
||||
}
|
||||
|
||||
// Generic mock chain factory with proper typing
|
||||
export const mockChain = <
|
||||
TInput = Record<string, unknown>,
|
||||
TOutput = Record<string, unknown>,
|
||||
>(): MockProxy<Chain<TInput, TOutput>> => {
|
||||
return mock<Chain<TInput, TOutput>>();
|
||||
};
|
||||
|
||||
// Convenience factory for parameter updater chain
|
||||
export const mockParameterUpdaterChain = () => {
|
||||
return mockChain<Record<string, unknown>, { parameters: Record<string, unknown> }>();
|
||||
};
|
||||
|
||||
// Helper to assert node parameters
|
||||
export const expectNodeToHaveParameters = (
|
||||
node: INode,
|
||||
expectedParams: Partial<INodeParameters>,
|
||||
): void => {
|
||||
expect(node.parameters).toMatchObject(expectedParams);
|
||||
};
|
||||
|
||||
// Helper to assert connections exist
|
||||
export const expectConnectionToExist = (
|
||||
connections: SimpleWorkflow['connections'],
|
||||
fromId: string,
|
||||
toId: string,
|
||||
type: string = 'main',
|
||||
): void => {
|
||||
expect(connections[fromId]).toBeDefined();
|
||||
expect(connections[fromId][type]).toBeDefined();
|
||||
expect(connections[fromId][type]).toContainEqual(
|
||||
expect.arrayContaining([expect.objectContaining({ node: toId })]),
|
||||
);
|
||||
};
|
||||
|
||||
// ========== LangGraph Testing Utilities ==========
|
||||
|
||||
// Types for mocked Command results
|
||||
export type MockedCommandResult = { content: string };
|
||||
|
||||
// Common parsed content structure for tool results
|
||||
export interface ParsedToolContent {
|
||||
update: {
|
||||
messages: Array<{ kwargs: { content: string } }>;
|
||||
workflowOperations?: Array<{
|
||||
type: string;
|
||||
nodes?: INode[];
|
||||
[key: string]: unknown;
|
||||
}>;
|
||||
};
|
||||
}
|
||||
|
||||
// Setup LangGraph mocks
|
||||
export const setupLangGraphMocks = () => {
|
||||
const mockGetCurrentTaskInput = getCurrentTaskInput as jest.MockedFunction<
|
||||
typeof getCurrentTaskInput
|
||||
>;
|
||||
|
||||
jest.mock('@langchain/langgraph', () => ({
|
||||
getCurrentTaskInput: jest.fn(),
|
||||
Command: jest.fn().mockImplementation((params: Record<string, unknown>) => ({
|
||||
content: JSON.stringify(params),
|
||||
})),
|
||||
}));
|
||||
|
||||
return { mockGetCurrentTaskInput };
|
||||
};
|
||||
|
||||
// Parse tool result with double-wrapped content handling
|
||||
export const parseToolResult = <T = ParsedToolContent>(result: unknown): T => {
|
||||
const parsed = jsonParse<{ content?: string }>((result as MockedCommandResult).content);
|
||||
return parsed.content ? jsonParse<T>(parsed.content) : (parsed as T);
|
||||
};
|
||||
|
||||
// ========== Progress Message Utilities ==========
|
||||
|
||||
// Extract progress messages from mockWriter
|
||||
export const extractProgressMessages = (
|
||||
mockWriter: jest.Mock,
|
||||
): Array<ToolProgressMessage<string>> => {
|
||||
const progressCalls: Array<ToolProgressMessage<string>> = [];
|
||||
|
||||
mockWriter.mock.calls.forEach((call) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||||
const [arg] = call;
|
||||
progressCalls.push(arg as ToolProgressMessage<string>);
|
||||
});
|
||||
return progressCalls;
|
||||
};
|
||||
|
||||
// Find specific progress message by type
|
||||
export const findProgressMessage = (
|
||||
messages: Array<ToolProgressMessage<string>>,
|
||||
status: 'running' | 'completed' | 'error',
|
||||
updateType?: string,
|
||||
): ToolProgressMessage<string> | undefined => {
|
||||
return messages.find(
|
||||
(msg) => msg.status === status && (!updateType || msg.updates[0]?.type === updateType),
|
||||
);
|
||||
};
|
||||
|
||||
// ========== Tool Config Helpers ==========
|
||||
|
||||
// Create basic tool config
|
||||
export const createToolConfig = (
|
||||
toolName: string,
|
||||
callId: string = 'test-call',
|
||||
): ToolRunnableConfig => ({
|
||||
toolCall: { id: callId, name: toolName, args: {} },
|
||||
});
|
||||
|
||||
// Create tool config with writer for progress tracking
|
||||
export const createToolConfigWithWriter = (
|
||||
toolName: string,
|
||||
callId: string = 'test-call',
|
||||
): ToolRunnableConfig & LangGraphRunnableConfig & { writer: jest.Mock } => {
|
||||
const mockWriter = jest.fn();
|
||||
return {
|
||||
toolCall: { id: callId, name: toolName, args: {} },
|
||||
writer: mockWriter,
|
||||
};
|
||||
};
|
||||
|
||||
// ========== Workflow State Helpers ==========
|
||||
|
||||
// Setup workflow state with mockGetCurrentTaskInput
|
||||
export const setupWorkflowState = (
|
||||
mockGetCurrentTaskInput: jest.MockedFunction<typeof getCurrentTaskInput>,
|
||||
workflow: SimpleWorkflow = createWorkflow([]),
|
||||
) => {
|
||||
mockGetCurrentTaskInput.mockReturnValue({
|
||||
workflowJSON: workflow,
|
||||
});
|
||||
};
|
||||
|
||||
// ========== Common Tool Assertions ==========
|
||||
|
||||
// Expect tool success message
|
||||
export const expectToolSuccess = (
|
||||
content: ParsedToolContent,
|
||||
expectedMessage: string | RegExp,
|
||||
): void => {
|
||||
const message = content.update.messages[0]?.kwargs.content;
|
||||
expect(message).toBeDefined();
|
||||
if (typeof expectedMessage === 'string') {
|
||||
expect(message).toContain(expectedMessage);
|
||||
} else {
|
||||
expect(message).toMatch(expectedMessage);
|
||||
}
|
||||
};
|
||||
|
||||
// Expect tool error message
|
||||
export const expectToolError = (
|
||||
content: ParsedToolContent,
|
||||
expectedError: string | RegExp,
|
||||
): void => {
|
||||
const message = content.update.messages[0]?.kwargs.content;
|
||||
if (typeof expectedError === 'string') {
|
||||
expect(message).toBe(expectedError);
|
||||
} else {
|
||||
expect(message).toMatch(expectedError);
|
||||
}
|
||||
};
|
||||
|
||||
// Expect workflow operation of specific type
|
||||
export const expectWorkflowOperation = (
|
||||
content: ParsedToolContent,
|
||||
operationType: string,
|
||||
matcher?: Record<string, unknown>,
|
||||
): void => {
|
||||
const operation = content.update.workflowOperations?.[0];
|
||||
expect(operation).toBeDefined();
|
||||
expect(operation?.type).toBe(operationType);
|
||||
if (matcher) {
|
||||
expect(operation).toMatchObject(matcher);
|
||||
}
|
||||
};
|
||||
|
||||
// Expect node was added
|
||||
export const expectNodeAdded = (content: ParsedToolContent, expectedNode: Partial<INode>): void => {
|
||||
expectWorkflowOperation(content, 'addNodes');
|
||||
const addedNode = content.update.workflowOperations?.[0]?.nodes?.[0];
|
||||
expect(addedNode).toBeDefined();
|
||||
expect(addedNode).toMatchObject(expectedNode);
|
||||
};
|
||||
|
||||
// Expect node was removed
|
||||
export const expectNodeRemoved = (content: ParsedToolContent, nodeId: string): void => {
|
||||
expectWorkflowOperation(content, 'removeNode', { nodeIds: [nodeId] });
|
||||
};
|
||||
|
||||
// Expect connections were added
|
||||
export const expectConnectionsAdded = (
|
||||
content: ParsedToolContent,
|
||||
expectedCount?: number,
|
||||
): void => {
|
||||
expectWorkflowOperation(content, 'addConnections');
|
||||
if (expectedCount !== undefined) {
|
||||
const connections = content.update.workflowOperations?.[0]?.connections;
|
||||
expect(connections).toHaveLength(expectedCount);
|
||||
}
|
||||
};
|
||||
|
||||
// Expect node was updated
|
||||
export const expectNodeUpdated = (
|
||||
content: ParsedToolContent,
|
||||
nodeId: string,
|
||||
expectedUpdates?: Record<string, unknown>,
|
||||
): void => {
|
||||
expectWorkflowOperation(content, 'updateNode', {
|
||||
nodeId,
|
||||
...(expectedUpdates ? { updates: expect.objectContaining(expectedUpdates) } : {}),
|
||||
});
|
||||
};
|
||||
|
||||
// ========== Test Data Builders ==========
|
||||
|
||||
// Build add node input
|
||||
export const buildAddNodeInput = (overrides: {
|
||||
nodeType: string;
|
||||
name?: string;
|
||||
connectionParametersReasoning?: string;
|
||||
connectionParameters?: Record<string, unknown>;
|
||||
}) => ({
|
||||
nodeType: overrides.nodeType,
|
||||
name: overrides.name ?? 'Test Node',
|
||||
connectionParametersReasoning:
|
||||
overrides.connectionParametersReasoning ??
|
||||
'Standard node with static inputs/outputs, no connection parameters needed',
|
||||
connectionParameters: overrides.connectionParameters ?? {},
|
||||
});
|
||||
|
||||
// Build connect nodes input
|
||||
export const buildConnectNodesInput = (overrides: {
|
||||
sourceNodeId: string;
|
||||
targetNodeId: string;
|
||||
sourceOutputIndex?: number;
|
||||
targetInputIndex?: number;
|
||||
}) => ({
|
||||
sourceNodeId: overrides.sourceNodeId,
|
||||
targetNodeId: overrides.targetNodeId,
|
||||
sourceOutputIndex: overrides.sourceOutputIndex ?? 0,
|
||||
targetInputIndex: overrides.targetInputIndex ?? 0,
|
||||
});
|
||||
|
||||
// Build node search query
|
||||
export const buildNodeSearchQuery = (
|
||||
queryType: 'name' | 'subNodeSearch',
|
||||
query?: string,
|
||||
connectionType?: NodeConnectionType,
|
||||
) => ({
|
||||
queryType,
|
||||
...(query && { query }),
|
||||
...(connectionType && { connectionType }),
|
||||
});
|
||||
|
||||
// Build update node parameters input
|
||||
export const buildUpdateNodeInput = (nodeId: string, changes: string[]) => ({
|
||||
nodeId,
|
||||
changes,
|
||||
});
|
||||
|
||||
// Build node details input
|
||||
export const buildNodeDetailsInput = (overrides: {
|
||||
nodeName: string;
|
||||
withParameters?: boolean;
|
||||
withConnections?: boolean;
|
||||
}) => ({
|
||||
nodeName: overrides.nodeName,
|
||||
withParameters: overrides.withParameters ?? false,
|
||||
withConnections: overrides.withConnections ?? true,
|
||||
});
|
||||
|
||||
// Expect node details in response
|
||||
export const expectNodeDetails = (
|
||||
content: ParsedToolContent,
|
||||
expectedDetails: Partial<{
|
||||
name: string;
|
||||
displayName: string;
|
||||
description: string;
|
||||
subtitle?: string;
|
||||
}>,
|
||||
): void => {
|
||||
const message = content.update.messages[0]?.kwargs.content;
|
||||
expect(message).toBeDefined();
|
||||
|
||||
// Check for expected XML-like tags in formatted output
|
||||
if (expectedDetails.name) {
|
||||
expect(message).toContain(`<name>${expectedDetails.name}</name>`);
|
||||
}
|
||||
if (expectedDetails.displayName) {
|
||||
expect(message).toContain(`<display_name>${expectedDetails.displayName}</display_name>`);
|
||||
}
|
||||
if (expectedDetails.description) {
|
||||
expect(message).toContain(`<description>${expectedDetails.description}</description>`);
|
||||
}
|
||||
if (expectedDetails.subtitle) {
|
||||
expect(message).toContain(`<subtitle>${expectedDetails.subtitle}</subtitle>`);
|
||||
}
|
||||
};
|
||||
|
||||
// Helper to validate XML-like structure in output
|
||||
export const expectXMLTag = (
|
||||
content: string,
|
||||
tagName: string,
|
||||
expectedValue?: string | RegExp,
|
||||
): void => {
|
||||
const tagRegex = new RegExp(`<${tagName}>([\\s\\S]*?)</${tagName}>`);
|
||||
const match = content.match(tagRegex);
|
||||
expect(match).toBeDefined();
|
||||
if (expectedValue) {
|
||||
if (typeof expectedValue === 'string') {
|
||||
expect(match?.[1]?.trim()).toBe(expectedValue);
|
||||
} else {
|
||||
expect(match?.[1]).toMatch(expectedValue);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Common reasoning strings
|
||||
export const REASONING = {
|
||||
STATIC_NODE: 'Node has static inputs/outputs, no connection parameters needed',
|
||||
DYNAMIC_AI_NODE: 'AI node has dynamic inputs, setting connection parameters',
|
||||
TRIGGER_NODE: 'Trigger node, no connection parameters needed',
|
||||
WEBHOOK_NODE: 'Webhook is a trigger node, no connection parameters needed',
|
||||
} as const;
|
||||
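For illustration, a minimal sketch of how the helpers above might compose in a Jest test. The import path and the `addNodeTool` under test are assumptions, not part of this diff.

// Hypothetical test wiring the utilities together; module paths and the tool are assumed.
import {
	setupLangGraphMocks,
	setupWorkflowState,
	createToolConfigWithWriter,
	parseToolResult,
	expectToolSuccess,
	expectNodeAdded,
	buildAddNodeInput,
} from './test-utils';
import { addNodeTool } from '../tools/add-node.tool'; // hypothetical tool under test

const { mockGetCurrentTaskInput } = setupLangGraphMocks();

describe('addNodeTool', () => {
	it('adds a node and reports success', async () => {
		setupWorkflowState(mockGetCurrentTaskInput); // empty workflow by default
		const config = createToolConfigWithWriter('add_node');

		const result = await addNodeTool.invoke(
			buildAddNodeInput({ nodeType: 'n8n-nodes-base.set' }),
			config,
		);

		const content = parseToolResult(result);
		expectToolSuccess(content, /added/i);
		expectNodeAdded(content, { type: 'n8n-nodes-base.set' });
	});
});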
17
n8n-n8n-1.109.2/packages/@n8n/api-types/src/api-keys 2.ts
Executable file
@@ -0,0 +1,17 @@
import type { ApiKeyScope } from '@n8n/permissions';

/** Unix timestamp. Seconds since epoch */
export type UnixTimestamp = number | null;

export type ApiKey = {
	id: string;
	label: string;
	apiKey: string;
	createdAt: string;
	updatedAt: string;
	/** Null if API key never expires */
	expiresAt: UnixTimestamp | null;
	scopes: ApiKeyScope[];
};

export type ApiKeyWithRawValue = ApiKey & { rawApiKey: string };
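For orientation only, a value satisfying the ApiKey shape; every field value below is invented.

// Illustrative only; not part of the diff.
const exampleKey: ApiKey = {
	id: '1',
	label: 'CI key',
	apiKey: 'n8n_api_************',
	createdAt: '2024-01-01T00:00:00.000Z',
	updatedAt: '2024-01-01T00:00:00.000Z',
	expiresAt: null, // never expires
	scopes: [],
};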
21
n8n-n8n-1.109.2/packages/@n8n/api-types/src/community-node-types 2.ts
Executable file
@@ -0,0 +1,21 @@
import type { INodeTypeDescription } from 'n8n-workflow';

export type CommunityNodeType = {
	authorGithubUrl: string;
	authorName: string;
	checksum: string;
	description: string;
	displayName: string;
	name: string;
	numberOfStars: number;
	numberOfDownloads: number;
	packageName: string;
	createdAt: string;
	updatedAt: string;
	npmVersion: string;
	isOfficialNode: boolean;
	companyName?: string;
	nodeDescription: INodeTypeDescription;
	isInstalled: boolean;
	nodeVersions?: Array<{ npmVersion: string; checksum: string }>;
};
2
n8n-n8n-1.109.2/packages/@n8n/api-types/src/datetime 2.ts
Executable file
@@ -0,0 +1,2 @@
/** Date time in the ISO 8601 format, e.g. 2024-10-31T00:00:00.123Z */
export type Iso8601DateTimeString = string;
227
n8n-n8n-1.109.2/packages/@n8n/api-types/src/frontend-settings 2.ts
Executable file
@@ -0,0 +1,227 @@
|
||||
import type { LogLevel, WorkflowSettings } from 'n8n-workflow';
|
||||
|
||||
import { type InsightsDateRange } from './schemas/insights.schema';
|
||||
|
||||
export interface IVersionNotificationSettings {
|
||||
enabled: boolean;
|
||||
endpoint: string;
|
||||
whatsNewEnabled: boolean;
|
||||
whatsNewEndpoint: string;
|
||||
infoUrl: string;
|
||||
}
|
||||
|
||||
export interface ITelemetryClientConfig {
|
||||
url: string;
|
||||
key: string;
|
||||
proxy: string;
|
||||
sourceConfig: string;
|
||||
}
|
||||
|
||||
export interface ITelemetrySettings {
|
||||
enabled: boolean;
|
||||
config?: ITelemetryClientConfig;
|
||||
}
|
||||
|
||||
export type AuthenticationMethod = 'email' | 'ldap' | 'saml' | 'oidc';
|
||||
|
||||
export interface IUserManagementSettings {
|
||||
quota: number;
|
||||
showSetupOnFirstLoad?: boolean;
|
||||
smtpSetup: boolean;
|
||||
authenticationMethod: AuthenticationMethod;
|
||||
}
|
||||
|
||||
export interface FrontendSettings {
|
||||
inE2ETests: boolean;
|
||||
isDocker: boolean;
|
||||
databaseType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';
|
||||
endpointForm: string;
|
||||
endpointFormTest: string;
|
||||
endpointFormWaiting: string;
|
||||
endpointMcp: string;
|
||||
endpointMcpTest: string;
|
||||
endpointWebhook: string;
|
||||
endpointWebhookTest: string;
|
||||
endpointWebhookWaiting: string;
|
||||
saveDataErrorExecution: WorkflowSettings.SaveDataExecution;
|
||||
saveDataSuccessExecution: WorkflowSettings.SaveDataExecution;
|
||||
saveManualExecutions: boolean;
|
||||
saveExecutionProgress: boolean;
|
||||
executionTimeout: number;
|
||||
maxExecutionTimeout: number;
|
||||
workflowCallerPolicyDefaultOption: WorkflowSettings.CallerPolicy;
|
||||
oauthCallbackUrls: {
|
||||
oauth1: string;
|
||||
oauth2: string;
|
||||
};
|
||||
timezone: string;
|
||||
urlBaseWebhook: string;
|
||||
urlBaseEditor: string;
|
||||
versionCli: string;
|
||||
nodeJsVersion: string;
|
||||
concurrency: number;
|
||||
authCookie: {
|
||||
secure: boolean;
|
||||
};
|
||||
binaryDataMode: 'default' | 'filesystem' | 's3';
|
||||
releaseChannel: 'stable' | 'beta' | 'nightly' | 'dev';
|
||||
n8nMetadata?: {
|
||||
userId?: string;
|
||||
[key: string]: string | number | undefined;
|
||||
};
|
||||
versionNotifications: IVersionNotificationSettings;
|
||||
instanceId: string;
|
||||
telemetry: ITelemetrySettings;
|
||||
posthog: {
|
||||
enabled: boolean;
|
||||
apiHost: string;
|
||||
apiKey: string;
|
||||
autocapture: boolean;
|
||||
disableSessionRecording: boolean;
|
||||
debug: boolean;
|
||||
};
|
||||
personalizationSurveyEnabled: boolean;
|
||||
defaultLocale: string;
|
||||
userManagement: IUserManagementSettings;
|
||||
sso: {
|
||||
saml: {
|
||||
loginLabel: string;
|
||||
loginEnabled: boolean;
|
||||
};
|
||||
oidc: {
|
||||
loginEnabled: boolean;
|
||||
loginUrl: string;
|
||||
callbackUrl: string;
|
||||
};
|
||||
ldap: {
|
||||
loginLabel: string;
|
||||
loginEnabled: boolean;
|
||||
};
|
||||
};
|
||||
publicApi: {
|
||||
enabled: boolean;
|
||||
latestVersion: number;
|
||||
path: string;
|
||||
swaggerUi: {
|
||||
enabled: boolean;
|
||||
};
|
||||
};
|
||||
workflowTagsDisabled: boolean;
|
||||
logLevel: LogLevel;
|
||||
hiringBannerEnabled: boolean;
|
||||
previewMode: boolean;
|
||||
templates: {
|
||||
enabled: boolean;
|
||||
host: string;
|
||||
};
|
||||
missingPackages?: boolean;
|
||||
executionMode: 'regular' | 'queue';
|
||||
/** Whether multi-main mode is enabled and licensed for this main instance. */
|
||||
isMultiMain: boolean;
|
||||
pushBackend: 'sse' | 'websocket';
|
||||
communityNodesEnabled: boolean;
|
||||
unverifiedCommunityNodesEnabled: boolean;
|
||||
aiAssistant: {
|
||||
enabled: boolean;
|
||||
};
|
||||
askAi: {
|
||||
enabled: boolean;
|
||||
};
|
||||
deployment: {
|
||||
type: string;
|
||||
};
|
||||
allowedModules: {
|
||||
builtIn?: string[];
|
||||
external?: string[];
|
||||
};
|
||||
enterprise: {
|
||||
sharing: boolean;
|
||||
ldap: boolean;
|
||||
saml: boolean;
|
||||
oidc: boolean;
|
||||
mfaEnforcement: boolean;
|
||||
logStreaming: boolean;
|
||||
advancedExecutionFilters: boolean;
|
||||
variables: boolean;
|
||||
sourceControl: boolean;
|
||||
auditLogs: boolean;
|
||||
externalSecrets: boolean;
|
||||
showNonProdBanner: boolean;
|
||||
debugInEditor: boolean;
|
||||
binaryDataS3: boolean;
|
||||
workflowHistory: boolean;
|
||||
workerView: boolean;
|
||||
advancedPermissions: boolean;
|
||||
apiKeyScopes: boolean;
|
||||
workflowDiffs: boolean;
|
||||
projects: {
|
||||
team: {
|
||||
limit: number;
|
||||
};
|
||||
};
|
||||
};
|
||||
hideUsagePage: boolean;
|
||||
license: {
|
||||
planName?: string;
|
||||
consumerId: string;
|
||||
environment: 'development' | 'production' | 'staging';
|
||||
};
|
||||
variables: {
|
||||
limit: number;
|
||||
};
|
||||
mfa: {
|
||||
enabled: boolean;
|
||||
enforced: boolean;
|
||||
};
|
||||
folders: {
|
||||
enabled: boolean;
|
||||
};
|
||||
banners: {
|
||||
dismissed: string[];
|
||||
};
|
||||
workflowHistory: {
|
||||
pruneTime: number;
|
||||
licensePruneTime: number;
|
||||
};
|
||||
aiCredits: {
|
||||
enabled: boolean;
|
||||
credits: number;
|
||||
};
|
||||
pruning?: {
|
||||
isEnabled: boolean;
|
||||
maxAge: number;
|
||||
maxCount: number;
|
||||
};
|
||||
security: {
|
||||
blockFileAccessToN8nFiles: boolean;
|
||||
};
|
||||
easyAIWorkflowOnboarded: boolean;
|
||||
partialExecution: {
|
||||
version: 1 | 2;
|
||||
};
|
||||
evaluation: {
|
||||
quota: number;
|
||||
};
|
||||
|
||||
/** Backend modules that were initialized during startup. */
|
||||
activeModules: string[];
|
||||
envFeatureFlags: N8nEnvFeatFlags;
|
||||
}
|
||||
|
||||
export type FrontendModuleSettings = {
|
||||
/**
|
||||
* Client settings for [insights](https://docs.n8n.io/insights/) module.
|
||||
*
|
||||
* - `summary`: Whether the summary banner should be shown.
|
||||
* - `dashboard`: Whether the full dashboard should be shown.
|
||||
* - `dateRanges`: Date range filters available to select.
|
||||
*/
|
||||
insights?: {
|
||||
summary: boolean;
|
||||
dashboard: boolean;
|
||||
dateRanges: InsightsDateRange[];
|
||||
};
|
||||
};
|
||||
|
||||
export type N8nEnvFeatFlagValue = boolean | string | number | undefined;
|
||||
export type N8nEnvFeatFlags = Record<`N8N_ENV_FEAT_${Uppercase<string>}`, N8nEnvFeatFlagValue>;
|
||||
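A hedged sketch of values matching the `N8nEnvFeatFlags` template-literal key type; the flag names below are invented for illustration.

// Illustrative only; keys must be N8N_ENV_FEAT_ followed by an uppercase name.
const envFeatureFlags: N8nEnvFeatFlags = {
	N8N_ENV_FEAT_MY_EXPERIMENT: true,
	N8N_ENV_FEAT_BATCH_SIZE: 25,
};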
57
n8n-n8n-1.109.2/packages/@n8n/api-types/src/index 2.ts
Executable file
@@ -0,0 +1,57 @@
|
||||
export type * from './datetime';
|
||||
export * from './dto';
|
||||
export type * from './push';
|
||||
export type * from './scaling';
|
||||
export type * from './frontend-settings';
|
||||
export type * from './user';
|
||||
export type * from './api-keys';
|
||||
export type * from './community-node-types';
|
||||
|
||||
export type { Collaborator } from './push/collaboration';
|
||||
export type { HeartbeatMessage } from './push/heartbeat';
|
||||
export { createHeartbeatMessage, heartbeatMessageSchema } from './push/heartbeat';
|
||||
export type { SendWorkerStatusMessage } from './push/worker';
|
||||
|
||||
export type { BannerName } from './schemas/banner-name.schema';
|
||||
export { ViewableMimeTypes } from './schemas/binary-data.schema';
|
||||
export { passwordSchema } from './schemas/password.schema';
|
||||
|
||||
export type {
|
||||
ProjectType,
|
||||
ProjectIcon,
|
||||
ProjectRelation,
|
||||
} from './schemas/project.schema';
|
||||
|
||||
export {
|
||||
type SourceControlledFile,
|
||||
SOURCE_CONTROL_FILE_LOCATION,
|
||||
SOURCE_CONTROL_FILE_STATUS,
|
||||
SOURCE_CONTROL_FILE_TYPE,
|
||||
} from './schemas/source-controlled-file.schema';
|
||||
|
||||
export {
|
||||
type InsightsSummaryType,
|
||||
type InsightsSummaryUnit,
|
||||
type InsightsSummary,
|
||||
type InsightsByWorkflow,
|
||||
type InsightsByTime,
|
||||
type InsightsDateRange,
|
||||
} from './schemas/insights.schema';
|
||||
|
||||
export {
|
||||
ROLE,
|
||||
type Role,
|
||||
type User,
|
||||
type UsersList,
|
||||
usersListSchema,
|
||||
} from './schemas/user.schema';
|
||||
|
||||
export {
|
||||
DATA_STORE_COLUMN_REGEX,
|
||||
type DataStore,
|
||||
type DataStoreColumn,
|
||||
type DataStoreCreateColumnSchema,
|
||||
type DataStoreListFilter,
|
||||
type DataStoreListOptions,
|
||||
dateTimeSchema,
|
||||
} from './schemas/data-store.schema';
|
||||
30
n8n-n8n-1.109.2/packages/@n8n/api-types/src/scaling 2.ts
Executable file
@@ -0,0 +1,30 @@
import type { ExecutionStatus, WorkflowExecuteMode } from 'n8n-workflow';

export type RunningJobSummary = {
	executionId: string;
	workflowId: string;
	workflowName: string;
	mode: WorkflowExecuteMode;
	startedAt: Date;
	retryOf?: string;
	status: ExecutionStatus;
};

export type WorkerStatus = {
	senderId: string;
	runningJobsSummary: RunningJobSummary[];
	freeMem: number;
	totalMem: number;
	uptime: number;
	loadAvg: number[];
	cpus: string;
	arch: string;
	platform: NodeJS.Platform;
	hostname: string;
	interfaces: Array<{
		family: 'IPv4' | 'IPv6';
		address: string;
		internal: boolean;
	}>;
	version: string;
};
6
n8n-n8n-1.109.2/packages/@n8n/api-types/src/user 2.ts
Executable file
@@ -0,0 +1,6 @@
export type MinimalUser = {
	id: string;
	email: string;
	firstName: string;
	lastName: string;
};
63
n8n-n8n-1.109.2/packages/@n8n/backend-common/src/cli-parser 2.ts
Executable file
@@ -0,0 +1,63 @@
|
||||
import { Service } from '@n8n/di';
|
||||
import argvParser from 'yargs-parser';
|
||||
import type { z } from 'zod';
|
||||
|
||||
import { Logger } from './logging';
|
||||
|
||||
type CliInput<Flags extends z.ZodRawShape> = {
|
||||
argv: string[];
|
||||
flagsSchema?: z.ZodObject<Flags>;
|
||||
description?: string;
|
||||
examples?: string[];
|
||||
};
|
||||
|
||||
type ParsedArgs<Flags = Record<string, unknown>> = {
|
||||
flags: Flags;
|
||||
args: string[];
|
||||
};
|
||||
|
||||
@Service()
|
||||
export class CliParser {
|
||||
constructor(private readonly logger: Logger) {}
|
||||
|
||||
parse<Flags extends z.ZodRawShape>(
|
||||
input: CliInput<Flags>,
|
||||
): ParsedArgs<z.infer<z.ZodObject<Flags>>> {
|
||||
// eslint-disable-next-line id-denylist
|
||||
const { _: rest, ...rawFlags } = argvParser(input.argv, { string: ['id'] });
|
||||
|
||||
let flags = {} as z.infer<z.ZodObject<Flags>>;
|
||||
if (input.flagsSchema) {
|
||||
for (const key in input.flagsSchema.shape) {
|
||||
const flagSchema = input.flagsSchema.shape[key];
|
||||
let schemaDef = flagSchema._def as z.ZodTypeDef & {
|
||||
typeName: string;
|
||||
innerType?: z.ZodType;
|
||||
_alias?: string;
|
||||
};
|
||||
|
||||
if (schemaDef.typeName === 'ZodOptional' && schemaDef.innerType) {
|
||||
schemaDef = schemaDef.innerType._def as typeof schemaDef;
|
||||
}
|
||||
|
||||
const alias = schemaDef._alias;
|
||||
if (alias?.length && !(key in rawFlags) && rawFlags[alias]) {
|
||||
rawFlags[key] = rawFlags[alias] as unknown;
|
||||
}
|
||||
}
|
||||
|
||||
flags = input.flagsSchema.parse(rawFlags);
|
||||
}
|
||||
|
||||
const args = rest.map(String).slice(2);
|
||||
|
||||
this.logger.debug('Received CLI command', {
|
||||
execPath: rest[0],
|
||||
scriptPath: rest[1],
|
||||
args,
|
||||
flags,
|
||||
});
|
||||
|
||||
return { flags, args };
|
||||
}
|
||||
}
|
||||
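A minimal usage sketch for the CliParser above. The flag names and values are assumptions, and it presumes a Logger is already registered in the DI container.

// Sketch: parsing `node script.js worker --id 42 --concurrency 5` (flag names are illustrative).
import { Container } from '@n8n/di';
import { z } from 'zod';
import { CliParser } from '@n8n/backend-common';

const { flags, args } = Container.get(CliParser).parse({
	argv: process.argv,
	flagsSchema: z.object({
		id: z.string().optional(),
		concurrency: z.coerce.number().optional(),
	}),
});
// args -> ['worker'], flags -> { id: '42', concurrency: 5 }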
5
n8n-n8n-1.109.2/packages/@n8n/backend-common/src/environment 2.ts
Executable file
@@ -0,0 +1,5 @@
const { NODE_ENV } = process.env;

export const inTest = NODE_ENV === 'test';
export const inProduction = NODE_ENV === 'production';
export const inDevelopment = !NODE_ENV || NODE_ENV === 'development';
10
n8n-n8n-1.109.2/packages/@n8n/backend-common/src/index 2.ts
Executable file
@@ -0,0 +1,10 @@
export * from './license-state';
export * from './types';

export { inDevelopment, inProduction, inTest } from './environment';
export { isObjectLiteral } from './utils/is-object-literal';
export { Logger } from './logging/logger';
export { ModuleRegistry } from './modules/module-registry';
export { ModulesConfig, ModuleName } from './modules/modules.config';
export { isContainedWithin, safeJoinPath } from './utils/path-util';
export { CliParser } from './cli-parser';
209
n8n-n8n-1.109.2/packages/@n8n/backend-common/src/license-state 2.ts
Executable file
@@ -0,0 +1,209 @@
|
||||
import type { BooleanLicenseFeature } from '@n8n/constants';
|
||||
import { UNLIMITED_LICENSE_QUOTA } from '@n8n/constants';
|
||||
import { Service } from '@n8n/di';
|
||||
import { UnexpectedError } from 'n8n-workflow';
|
||||
|
||||
import type { FeatureReturnType, LicenseProvider } from './types';
|
||||
|
||||
class ProviderNotSetError extends UnexpectedError {
|
||||
constructor() {
|
||||
super('Cannot query license state because license provider has not been set');
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
export class LicenseState {
|
||||
licenseProvider: LicenseProvider | null = null;
|
||||
|
||||
setLicenseProvider(provider: LicenseProvider) {
|
||||
this.licenseProvider = provider;
|
||||
}
|
||||
|
||||
private assertProvider(): asserts this is { licenseProvider: LicenseProvider } {
|
||||
if (!this.licenseProvider) throw new ProviderNotSetError();
|
||||
}
|
||||
|
||||
// --------------------
|
||||
// core queries
|
||||
// --------------------
|
||||
|
||||
isLicensed(feature: BooleanLicenseFeature) {
|
||||
this.assertProvider();
|
||||
|
||||
return this.licenseProvider.isLicensed(feature);
|
||||
}
|
||||
|
||||
getValue<T extends keyof FeatureReturnType>(feature: T): FeatureReturnType[T] {
|
||||
this.assertProvider();
|
||||
|
||||
return this.licenseProvider.getValue(feature);
|
||||
}
|
||||
|
||||
// --------------------
|
||||
// booleans
|
||||
// --------------------
|
||||
|
||||
isSharingLicensed() {
|
||||
return this.isLicensed('feat:sharing');
|
||||
}
|
||||
|
||||
isLogStreamingLicensed() {
|
||||
return this.isLicensed('feat:logStreaming');
|
||||
}
|
||||
|
||||
isLdapLicensed() {
|
||||
return this.isLicensed('feat:ldap');
|
||||
}
|
||||
|
||||
isSamlLicensed() {
|
||||
return this.isLicensed('feat:saml');
|
||||
}
|
||||
|
||||
isOidcLicensed() {
|
||||
return this.isLicensed('feat:oidc');
|
||||
}
|
||||
|
||||
isMFAEnforcementLicensed() {
|
||||
return this.isLicensed('feat:mfaEnforcement');
|
||||
}
|
||||
|
||||
isApiKeyScopesLicensed() {
|
||||
return this.isLicensed('feat:apiKeyScopes');
|
||||
}
|
||||
|
||||
isAiAssistantLicensed() {
|
||||
return this.isLicensed('feat:aiAssistant');
|
||||
}
|
||||
|
||||
isAskAiLicensed() {
|
||||
return this.isLicensed('feat:askAi');
|
||||
}
|
||||
|
||||
isAiCreditsLicensed() {
|
||||
return this.isLicensed('feat:aiCredits');
|
||||
}
|
||||
|
||||
isAdvancedExecutionFiltersLicensed() {
|
||||
return this.isLicensed('feat:advancedExecutionFilters');
|
||||
}
|
||||
|
||||
isAdvancedPermissionsLicensed() {
|
||||
return this.isLicensed('feat:advancedPermissions');
|
||||
}
|
||||
|
||||
isDebugInEditorLicensed() {
|
||||
return this.isLicensed('feat:debugInEditor');
|
||||
}
|
||||
|
||||
isBinaryDataS3Licensed() {
|
||||
return this.isLicensed('feat:binaryDataS3');
|
||||
}
|
||||
|
||||
isMultiMainLicensed() {
|
||||
return this.isLicensed('feat:multipleMainInstances');
|
||||
}
|
||||
|
||||
isVariablesLicensed() {
|
||||
return this.isLicensed('feat:variables');
|
||||
}
|
||||
|
||||
isSourceControlLicensed() {
|
||||
return this.isLicensed('feat:sourceControl');
|
||||
}
|
||||
|
||||
isExternalSecretsLicensed() {
|
||||
return this.isLicensed('feat:externalSecrets');
|
||||
}
|
||||
|
||||
isWorkflowHistoryLicensed() {
|
||||
return this.isLicensed('feat:workflowHistory');
|
||||
}
|
||||
|
||||
isAPIDisabled() {
|
||||
return this.isLicensed('feat:apiDisabled');
|
||||
}
|
||||
|
||||
isWorkerViewLicensed() {
|
||||
return this.isLicensed('feat:workerView');
|
||||
}
|
||||
|
||||
isProjectRoleAdminLicensed() {
|
||||
return this.isLicensed('feat:projectRole:admin');
|
||||
}
|
||||
|
||||
isProjectRoleEditorLicensed() {
|
||||
return this.isLicensed('feat:projectRole:editor');
|
||||
}
|
||||
|
||||
isProjectRoleViewerLicensed() {
|
||||
return this.isLicensed('feat:projectRole:viewer');
|
||||
}
|
||||
|
||||
isCustomNpmRegistryLicensed() {
|
||||
return this.isLicensed('feat:communityNodes:customRegistry');
|
||||
}
|
||||
|
||||
isFoldersLicensed() {
|
||||
return this.isLicensed('feat:folders');
|
||||
}
|
||||
|
||||
isInsightsSummaryLicensed() {
|
||||
return this.isLicensed('feat:insights:viewSummary');
|
||||
}
|
||||
|
||||
isInsightsDashboardLicensed() {
|
||||
return this.isLicensed('feat:insights:viewDashboard');
|
||||
}
|
||||
|
||||
isInsightsHourlyDataLicensed() {
|
||||
return this.isLicensed('feat:insights:viewHourlyData');
|
||||
}
|
||||
|
||||
isWorkflowDiffsLicensed() {
|
||||
return this.isLicensed('feat:workflowDiffs');
|
||||
}
|
||||
|
||||
// --------------------
|
||||
// integers
|
||||
// --------------------
|
||||
|
||||
getMaxUsers() {
|
||||
return this.getValue('quota:users') ?? UNLIMITED_LICENSE_QUOTA;
|
||||
}
|
||||
|
||||
getMaxActiveWorkflows() {
|
||||
return this.getValue('quota:activeWorkflows') ?? UNLIMITED_LICENSE_QUOTA;
|
||||
}
|
||||
|
||||
getMaxVariables() {
|
||||
return this.getValue('quota:maxVariables') ?? UNLIMITED_LICENSE_QUOTA;
|
||||
}
|
||||
|
||||
getMaxAiCredits() {
|
||||
return this.getValue('quota:aiCredits') ?? 0;
|
||||
}
|
||||
|
||||
getWorkflowHistoryPruneQuota() {
|
||||
return this.getValue('quota:workflowHistoryPrune') ?? UNLIMITED_LICENSE_QUOTA;
|
||||
}
|
||||
|
||||
getInsightsMaxHistory() {
|
||||
return this.getValue('quota:insights:maxHistoryDays') ?? 7;
|
||||
}
|
||||
|
||||
getInsightsRetentionMaxAge() {
|
||||
return this.getValue('quota:insights:retention:maxAgeDays') ?? 180;
|
||||
}
|
||||
|
||||
getInsightsRetentionPruneInterval() {
|
||||
return this.getValue('quota:insights:retention:pruneIntervalDays') ?? 24;
|
||||
}
|
||||
|
||||
getMaxTeamProjects() {
|
||||
return this.getValue('quota:maxTeamProjects') ?? 0;
|
||||
}
|
||||
|
||||
getMaxWorkflowsWithEvaluations() {
|
||||
return this.getValue('quota:evaluations:maxWorkflows') ?? 0;
|
||||
}
|
||||
}
|
||||
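A hedged sketch of wiring a provider into `LicenseState`; the inline provider here is a stub for illustration, not n8n's real license SDK integration.

// Illustrative stub provider; a real provider would read from the license SDK.
import { Container } from '@n8n/di';
import { LicenseState } from '@n8n/backend-common';

const licenseState = Container.get(LicenseState);
licenseState.setLicenseProvider({
	isLicensed: (feature) => feature === 'feat:sharing',
	getValue: () => undefined,
});

licenseState.isSharingLicensed(); // true with this stub
licenseState.getMaxUsers(); // falls back to UNLIMITED_LICENSE_QUOTA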
15
n8n-n8n-1.109.2/packages/@n8n/backend-common/src/types 2.ts
Executable file
@@ -0,0 +1,15 @@
import type { BooleanLicenseFeature, NumericLicenseFeature } from '@n8n/constants';

export type FeatureReturnType = Partial<
	{
		planName: string;
	} & { [K in NumericLicenseFeature]: number } & { [K in BooleanLicenseFeature]: boolean }
>;

export interface LicenseProvider {
	/** Returns whether a feature is included in the user's license plan. */
	isLicensed(feature: BooleanLicenseFeature): boolean;

	/** Returns the value of a feature in the user's license plan, typically a boolean or integer. */
	getValue<T extends keyof FeatureReturnType>(feature: T): FeatureReturnType[T];
}
12
n8n-n8n-1.109.2/packages/@n8n/backend-test-utils/src/index 2.ts
Executable file
@@ -0,0 +1,12 @@
import type { Logger } from '@n8n/backend-common';
import { mock } from 'jest-mock-extended';

export const mockLogger = (): Logger =>
	mock<Logger>({ scoped: jest.fn().mockReturnValue(mock<Logger>()) });

export * from './random';
export * as testDb from './test-db';
export * as testModules from './test-modules';
export * from './db/workflows';
export * from './db/projects';
export * from './mocking';
12
n8n-n8n-1.109.2/packages/@n8n/backend-test-utils/src/mocking 2.ts
Executable file
@@ -0,0 +1,12 @@
import { Container, type Constructable } from '@n8n/di';
import { mock } from 'jest-mock-extended';
import type { DeepPartial } from 'ts-essentials';

export const mockInstance = <T>(
	serviceClass: Constructable<T>,
	data: DeepPartial<T> | undefined = undefined,
) => {
	const instance = mock<T>(data);
	Container.set(serviceClass, instance);
	return instance;
};
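A small usage sketch; `GlobalConfig` is just one example of a DI-managed class that could be mocked this way.

// Registers a deep mock in the DI container and returns it for per-test overrides/assertions.
import { mockInstance } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';

const globalConfig = mockInstance(GlobalConfig, { database: { type: 'sqlite' } });
// Anything later calling Container.get(GlobalConfig) now receives this mock.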
63
n8n-n8n-1.109.2/packages/@n8n/backend-test-utils/src/random 2.ts
Executable file
@@ -0,0 +1,63 @@
|
||||
import { MIN_PASSWORD_CHAR_LENGTH, MAX_PASSWORD_CHAR_LENGTH } from '@n8n/constants';
|
||||
import { randomInt, randomString, UPPERCASE_LETTERS } from 'n8n-workflow';
|
||||
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export type CredentialPayload = {
|
||||
name: string;
|
||||
type: string;
|
||||
data: ICredentialDataDecryptedObject;
|
||||
isManaged?: boolean;
|
||||
};
|
||||
|
||||
export const randomApiKey = () => `n8n_api_${randomString(40)}`;
|
||||
|
||||
export const chooseRandomly = <T>(array: T[]) => array[randomInt(array.length)];
|
||||
|
||||
const randomUppercaseLetter = () => chooseRandomly(UPPERCASE_LETTERS.split(''));
|
||||
|
||||
export const randomValidPassword = () =>
|
||||
randomString(MIN_PASSWORD_CHAR_LENGTH, MAX_PASSWORD_CHAR_LENGTH - 2) +
|
||||
randomUppercaseLetter() +
|
||||
randomInt(10);
|
||||
|
||||
export const randomInvalidPassword = () =>
|
||||
chooseRandomly([
|
||||
randomString(1, MIN_PASSWORD_CHAR_LENGTH - 1),
|
||||
randomString(MAX_PASSWORD_CHAR_LENGTH + 2, MAX_PASSWORD_CHAR_LENGTH + 100),
|
||||
'abcdefgh', // valid length, no number, no uppercase
|
||||
'abcdefg1', // valid length, has number, no uppercase
|
||||
'abcdefgA', // valid length, no number, has uppercase
|
||||
'abcdefA', // invalid length, no number, has uppercase
|
||||
'abcdef1', // invalid length, has number, no uppercase
|
||||
'abcdeA1', // invalid length, has number, has uppercase
|
||||
'abcdefg', // invalid length, no number, no uppercase
|
||||
]);
|
||||
|
||||
const POPULAR_TOP_LEVEL_DOMAINS = ['com', 'org', 'net', 'io', 'edu'];
|
||||
|
||||
const randomTopLevelDomain = () => chooseRandomly(POPULAR_TOP_LEVEL_DOMAINS);
|
||||
|
||||
export const randomName = () => randomString(4, 8).toLowerCase();
|
||||
|
||||
export const randomEmail = () => `${randomName()}@${randomName()}.${randomTopLevelDomain()}`;
|
||||
|
||||
export const randomCredentialPayload = ({
|
||||
isManaged = false,
|
||||
}: { isManaged?: boolean } = {}): CredentialPayload => ({
|
||||
name: randomName(),
|
||||
type: randomName(),
|
||||
data: { accessToken: randomString(6, 16) },
|
||||
isManaged,
|
||||
});
|
||||
|
||||
export const randomCredentialPayloadWithOauthTokenData = ({
|
||||
isManaged = false,
|
||||
}: { isManaged?: boolean } = {}): CredentialPayload => ({
|
||||
name: randomName(),
|
||||
type: randomName(),
|
||||
data: { accessToken: randomString(6, 16), oauthTokenData: { access_token: randomString(6, 16) } },
|
||||
isManaged,
|
||||
});
|
||||
|
||||
export const uniqueId = () => uuid();
|
||||
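Example outputs for orientation; actual values are random on every call.

// Illustrative only.
const email = randomEmail(); // e.g. 'kqzt@vbnm.io'
const password = randomValidPassword(); // satisfies length, uppercase and digit rules
const credential = randomCredentialPayload(); // { name, type, data: { accessToken }, isManaged: false }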
86
n8n-n8n-1.109.2/packages/@n8n/backend-test-utils/src/test-db 2.ts
Executable file
@@ -0,0 +1,86 @@
|
||||
import { GlobalConfig } from '@n8n/config';
|
||||
import type { entities } from '@n8n/db';
|
||||
import { DbConnection, DbConnectionOptions } from '@n8n/db';
|
||||
import { Container } from '@n8n/di';
|
||||
import type { DataSourceOptions } from '@n8n/typeorm';
|
||||
import { DataSource as Connection } from '@n8n/typeorm';
|
||||
import { randomString } from 'n8n-workflow';
|
||||
|
||||
export const testDbPrefix = 'n8n_test_';
|
||||
|
||||
/**
|
||||
* Generate options for a bootstrap DB connection, to create and drop test databases.
|
||||
*/
|
||||
export const getBootstrapDBOptions = (dbType: 'postgresdb' | 'mysqldb'): DataSourceOptions => {
|
||||
const globalConfig = Container.get(GlobalConfig);
|
||||
const type = dbType === 'postgresdb' ? 'postgres' : 'mysql';
|
||||
return {
|
||||
type,
|
||||
...Container.get(DbConnectionOptions).getOverrides(dbType),
|
||||
database: type,
|
||||
entityPrefix: globalConfig.database.tablePrefix,
|
||||
schema: dbType === 'postgresdb' ? globalConfig.database.postgresdb.schema : undefined,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Initialize one test DB per suite run, with bootstrap connection if needed.
|
||||
*/
|
||||
export async function init() {
|
||||
const globalConfig = Container.get(GlobalConfig);
|
||||
const dbType = globalConfig.database.type;
|
||||
const testDbName = `${testDbPrefix}${randomString(6, 10).toLowerCase()}_${Date.now()}`;
|
||||
|
||||
if (dbType === 'postgresdb') {
|
||||
const bootstrapPostgres = await new Connection(
|
||||
getBootstrapDBOptions('postgresdb'),
|
||||
).initialize();
|
||||
await bootstrapPostgres.query(`CREATE DATABASE ${testDbName}`);
|
||||
await bootstrapPostgres.destroy();
|
||||
|
||||
globalConfig.database.postgresdb.database = testDbName;
|
||||
} else if (dbType === 'mysqldb' || dbType === 'mariadb') {
|
||||
const bootstrapMysql = await new Connection(getBootstrapDBOptions('mysqldb')).initialize();
|
||||
await bootstrapMysql.query(`CREATE DATABASE ${testDbName} DEFAULT CHARACTER SET utf8mb4`);
|
||||
await bootstrapMysql.destroy();
|
||||
|
||||
globalConfig.database.mysqldb.database = testDbName;
|
||||
}
|
||||
|
||||
const dbConnection = Container.get(DbConnection);
|
||||
await dbConnection.init();
|
||||
await dbConnection.migrate();
|
||||
}
|
||||
|
||||
export function isReady() {
|
||||
const { connectionState } = Container.get(DbConnection);
|
||||
return connectionState.connected && connectionState.migrated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop the test DB, closing the bootstrap connection if one exists.
|
||||
*/
|
||||
export async function terminate() {
|
||||
const dbConnection = Container.get(DbConnection);
|
||||
await dbConnection.close();
|
||||
dbConnection.connectionState.connected = false;
|
||||
}
|
||||
|
||||
type EntityName =
|
||||
| keyof typeof entities
|
||||
| 'InsightsRaw'
|
||||
| 'InsightsByPeriod'
|
||||
| 'InsightsMetadata'
|
||||
| 'DataStore'
|
||||
| 'DataStoreColumn';
|
||||
|
||||
/**
|
||||
* Truncate specific DB tables in a test DB.
|
||||
*/
|
||||
export async function truncate(entities: EntityName[]) {
|
||||
const connection = Container.get(Connection);
|
||||
|
||||
for (const name of entities) {
|
||||
await connection.getRepository(name).delete({});
|
||||
}
|
||||
}
|
||||
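A typical suite lifecycle using these helpers; the entity name passed to `truncate` is an assumption.

import { testDb } from '@n8n/backend-test-utils';

beforeAll(async () => {
	await testDb.init(); // creates and migrates a fresh n8n_test_* database
});

beforeEach(async () => {
	await testDb.truncate(['WorkflowEntity']); // entity name is illustrative
});

afterAll(async () => {
	await testDb.terminate();
});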
7
n8n-n8n-1.109.2/packages/@n8n/backend-test-utils/src/test-modules 2.ts
Executable file
@@ -0,0 +1,7 @@
import { ModuleRegistry } from '@n8n/backend-common';
import type { ModuleName } from '@n8n/backend-common';
import { Container } from '@n8n/di';

export async function loadModules(moduleNames: ModuleName[]) {
	await Container.get(ModuleRegistry).loadModules(moduleNames);
}
13
n8n-n8n-1.109.2/packages/@n8n/benchmark/bin/n8n-benchmark 2
Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env node

// Check if version should be displayed
const versionFlags = ['-v', '-V', '--version'];
if (versionFlags.includes(process.argv.slice(-1)[0])) {
	console.log(require('../package').version);
	process.exit(0);
}

(async () => {
	const oclif = require('@oclif/core');
	await oclif.execute({ dir: __dirname });
})();
60
n8n-n8n-1.109.2/packages/@n8n/benchmark/infra/.terraform.lock 2.hcl
Executable file
@@ -0,0 +1,60 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/hashicorp/azurerm" {
|
||||
version = "3.115.0"
|
||||
constraints = "~> 3.115.0"
|
||||
hashes = [
|
||||
"h1:O7C3Xb+MSOc9C/eAJ5C/CiJ4vuvUsYxxIzr9ZurmHNI=",
|
||||
"zh:0ea93abd53cb872691bad6d5625bda88b5d9619ea813c208b36e0ee236308589",
|
||||
"zh:26703cb9c2c38bc43e97bc83af03559d065750856ea85834b71fbcb2ef9d935c",
|
||||
"zh:316255a3391c49fe9bd7c5b6aa53b56dd490e1083d19b722e7b8f956a2dfe004",
|
||||
"zh:431637ae90c592126fb1ec813fee6390604275438a0d5e15904c65b0a6a0f826",
|
||||
"zh:4cee0fa2e84f89853723c0bc72b7debf8ea2ffffc7ae34ff28d8a69269d3a879",
|
||||
"zh:64a3a3c78ea877515365ed336bd0f3abbe71db7c99b3d2837915fbca168d429c",
|
||||
"zh:7380d7b503b5a87fd71a31360c3eeab504f78e4f314824e3ceda724d9dc74cf0",
|
||||
"zh:974213e05708037a6d2d8c58cc84981819138f44fe40e344034eb80e16ca6012",
|
||||
"zh:9a91614de0476074e9c62bbf08d3bb9c64adbd1d3a4a2b5a3e8e41d9d6d5672f",
|
||||
"zh:a438471c85b8788ab21bdef4cd5ca391a46cbae33bd0262668a80f5e6c4610e1",
|
||||
"zh:bf823f2c941b336a1208f015466212b1a8fdf6da28abacf59bea708377709d9e",
|
||||
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/random" {
|
||||
version = "3.6.2"
|
||||
hashes = [
|
||||
"h1:VavG5unYCa3SYISMKF9pzc3718M0bhPlcbUZZGl7wuo=",
|
||||
"zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec",
|
||||
"zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53",
|
||||
"zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114",
|
||||
"zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad",
|
||||
"zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b",
|
||||
"zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916",
|
||||
"zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150",
|
||||
"zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544",
|
||||
"zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7",
|
||||
"zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/tls" {
|
||||
version = "4.0.5"
|
||||
hashes = [
|
||||
"h1:zeG5RmggBZW/8JWIVrdaeSJa0OG62uFX5HY1eE8SjzY=",
|
||||
"zh:01cfb11cb74654c003f6d4e32bbef8f5969ee2856394a96d127da4949c65153e",
|
||||
"zh:0472ea1574026aa1e8ca82bb6df2c40cd0478e9336b7a8a64e652119a2fa4f32",
|
||||
"zh:1a8ddba2b1550c5d02003ea5d6cdda2eef6870ece86c5619f33edd699c9dc14b",
|
||||
"zh:1e3bb505c000adb12cdf60af5b08f0ed68bc3955b0d4d4a126db5ca4d429eb4a",
|
||||
"zh:6636401b2463c25e03e68a6b786acf91a311c78444b1dc4f97c539f9f78de22a",
|
||||
"zh:76858f9d8b460e7b2a338c477671d07286b0d287fd2d2e3214030ae8f61dd56e",
|
||||
"zh:a13b69fb43cb8746793b3069c4d897bb18f454290b496f19d03c3387d1c9a2dc",
|
||||
"zh:a90ca81bb9bb509063b736842250ecff0f886a91baae8de65c8430168001dad9",
|
||||
"zh:c4de401395936e41234f1956ebadbd2ed9f414e6908f27d578614aaa529870d4",
|
||||
"zh:c657e121af8fde19964482997f0de2d5173217274f6997e16389e7707ed8ece8",
|
||||
"zh:d68b07a67fbd604c38ec9733069fbf23441436fecf554de6c75c032f82e1ef19",
|
||||
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
|
||||
]
|
||||
}
|
||||
54
n8n-n8n-1.109.2/packages/@n8n/benchmark/infra/benchmark-env 2.tf
Executable file
@@ -0,0 +1,54 @@
|
||||
|
||||
data "azurerm_resource_group" "main" {
|
||||
name = var.resource_group_name
|
||||
}
|
||||
|
||||
# Random prefix for the resources
|
||||
resource "random_string" "prefix" {
|
||||
length = 8
|
||||
special = false
|
||||
}
|
||||
|
||||
# SSH key pair
|
||||
resource "tls_private_key" "ssh_key" {
|
||||
algorithm = "RSA"
|
||||
rsa_bits = 4096
|
||||
}
|
||||
|
||||
# Dedicated Host Group & Hosts
|
||||
|
||||
resource "azurerm_dedicated_host_group" "main" {
|
||||
name = "${random_string.prefix.result}-hostgroup"
|
||||
location = var.location
|
||||
resource_group_name = data.azurerm_resource_group.main.name
|
||||
platform_fault_domain_count = 1
|
||||
automatic_placement_enabled = false
|
||||
zone = 1
|
||||
|
||||
tags = local.common_tags
|
||||
}
|
||||
|
||||
resource "azurerm_dedicated_host" "hosts" {
|
||||
name = "${random_string.prefix.result}-host"
|
||||
location = var.location
|
||||
dedicated_host_group_id = azurerm_dedicated_host_group.main.id
|
||||
sku_name = var.host_size_family
|
||||
platform_fault_domain = 0
|
||||
|
||||
tags = local.common_tags
|
||||
}
|
||||
|
||||
# VM
|
||||
|
||||
module "test_vm" {
|
||||
source = "./modules/benchmark-vm"
|
||||
|
||||
location = var.location
|
||||
resource_group_name = data.azurerm_resource_group.main.name
|
||||
prefix = random_string.prefix.result
|
||||
dedicated_host_id = azurerm_dedicated_host.hosts.id
|
||||
ssh_public_key = tls_private_key.ssh_key.public_key_openssh
|
||||
vm_size = var.vm_size
|
||||
|
||||
tags = local.common_tags
|
||||
}
|
||||
16
n8n-n8n-1.109.2/packages/@n8n/benchmark/infra/output 2.tf
Executable file
@@ -0,0 +1,16 @@
output "vm_name" {
  value = module.test_vm.vm_name
}

output "ip" {
  value = module.test_vm.ip
}

output "ssh_username" {
  value = module.test_vm.ssh_username
}

output "ssh_private_key" {
  value     = tls_private_key.ssh_key.private_key_pem
  sensitive = true
}
23
n8n-n8n-1.109.2/packages/@n8n/benchmark/infra/providers 2.tf
Executable file
@@ -0,0 +1,23 @@

terraform {
  required_providers {
    azurerm = {
      source  = "hashicorp/azurerm"
      version = "~> 3.115.0"
    }

    random = {
      source = "hashicorp/random"
    }
  }

  required_version = "~> 1.8.5"
}

provider "azurerm" {
  features {}

  skip_provider_registration = true
}

provider "random" {}
34
n8n-n8n-1.109.2/packages/@n8n/benchmark/infra/vars 2.tf
Executable file
@@ -0,0 +1,34 @@
|
||||
variable "location" {
|
||||
description = "Region to deploy resources"
|
||||
default = "East US"
|
||||
}
|
||||
|
||||
variable "resource_group_name" {
|
||||
description = "Name of the resource group"
|
||||
default = "n8n-benchmarking"
|
||||
}
|
||||
|
||||
variable "host_size_family" {
|
||||
description = "Size Family for the Host Group"
|
||||
default = "DCSv2-Type1"
|
||||
}
|
||||
|
||||
variable "vm_size" {
|
||||
description = "VM Size"
|
||||
# 8 vCPUs, 32 GiB memory
|
||||
default = "Standard_DC8_v2"
|
||||
}
|
||||
|
||||
variable "number_of_vms" {
|
||||
description = "Number of VMs to create"
|
||||
default = 1
|
||||
}
|
||||
|
||||
locals {
|
||||
common_tags = {
|
||||
Id = "N8nBenchmark"
|
||||
Terraform = "true"
|
||||
Owner = "Catalysts"
|
||||
CreatedAt = timestamp()
|
||||
}
|
||||
}
|
||||
48
n8n-n8n-1.109.2/packages/@n8n/benchmark/scenarios/scenario.schema 2.json
Executable file
@@ -0,0 +1,48 @@
|
||||
{
|
||||
"definitions": {
|
||||
"ScenarioData": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"workflowFiles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"credentialFiles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": [],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"description": "The JSON schema to validate this file"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "The name of the scenario"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "A longer description of the scenario"
|
||||
},
|
||||
"scriptPath": {
|
||||
"type": "string",
|
||||
"description": "Relative path to the k6 test script"
|
||||
},
|
||||
"scenarioData": {
|
||||
"$ref": "#/definitions/ScenarioData",
|
||||
"description": "Data to import before running the scenario"
|
||||
}
|
||||
},
|
||||
"required": ["name", "description", "scriptPath", "scenarioData"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
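For reference, a scenario definition that would satisfy this schema might look like the following; the names and file paths are invented.

// Illustrative scenario definition matching the schema above.
const exampleScenario = {
	$schema: './scenario.schema.json',
	name: 'single-webhook',
	description: 'Triggers a single webhook workflow under constant load',
	scriptPath: './single-webhook.script.js',
	scenarioData: {
		workflowFiles: ['./single-webhook.workflow.json'],
	},
};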
63
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/bootstrap 2.sh
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Script to initialize the benchmark environment on a VM
|
||||
#
|
||||
|
||||
set -euo pipefail;
|
||||
|
||||
CURRENT_USER=$(whoami)
|
||||
|
||||
# Mount the data disk
|
||||
# First wait for the disk to become available
|
||||
WAIT_TIME=0
|
||||
MAX_WAIT_TIME=60
|
||||
|
||||
while [ ! -e /dev/sdc ]; do
|
||||
if [ $WAIT_TIME -ge $MAX_WAIT_TIME ]; then
|
||||
echo "Error: /dev/sdc did not become available within $MAX_WAIT_TIME seconds."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Waiting for /dev/sdc to be available... ($WAIT_TIME/$MAX_WAIT_TIME)"
|
||||
sleep 1
|
||||
WAIT_TIME=$((WAIT_TIME + 1))
|
||||
done
|
||||
|
||||
# Then mount it
|
||||
if [ -d "/n8n" ]; then
|
||||
echo "Data disk already mounted. Clearing it..."
|
||||
sudo rm -rf /n8n/*
|
||||
sudo rm -rf /n8n/.[!.]*
|
||||
else
|
||||
sudo mkdir -p /n8n
|
||||
sudo parted /dev/sdc --script mklabel gpt mkpart xfspart xfs 0% 100%
|
||||
sudo mkfs.xfs /dev/sdc1
|
||||
sudo partprobe /dev/sdc1
|
||||
sudo mount /dev/sdc1 /n8n
|
||||
sudo chown -R "$CURRENT_USER":"$CURRENT_USER" /n8n
|
||||
fi
|
||||
|
||||
### Remove unneeded dependencies
|
||||
# TTY
|
||||
sudo systemctl disable getty@tty1.service
|
||||
sudo systemctl disable serial-getty@ttyS0.service
|
||||
# Snap
|
||||
sudo systemctl disable snapd.service
|
||||
# Unattended upgrades
|
||||
sudo systemctl disable unattended-upgrades.service
|
||||
# Cron
|
||||
sudo systemctl disable cron.service
|
||||
|
||||
# Include nodejs v20 repository
|
||||
curl -fsSL https://deb.nodesource.com/setup_20.x -o nodesource_setup.sh
|
||||
sudo -E bash nodesource_setup.sh
|
||||
|
||||
# Install docker, docker compose and nodejs
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get update -yq
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -yq docker.io docker-compose nodejs
|
||||
|
||||
# Add the current user to the docker group
|
||||
sudo usermod -aG docker "$CURRENT_USER"
|
||||
|
||||
# Install zx
|
||||
npm install zx
|
||||
86
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/destroy-cloud-env 2.mjs
Executable file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env zx
|
||||
/**
|
||||
* Script that deletes all resources created by the benchmark environment.
|
||||
*
|
||||
* This script tries to delete resources created by Terraform. If the Terraform
|
||||
* state file is not found, it will try to delete resources using Azure CLI.
|
||||
* The terraform state is not persisted, so we want to support both cases.
|
||||
*/
|
||||
// @ts-check
|
||||
import { $, minimist } from 'zx';
|
||||
import { TerraformClient } from './clients/terraform-client.mjs';
|
||||
|
||||
const RESOURCE_GROUP_NAME = 'n8n-benchmarking';
|
||||
|
||||
const args = minimist(process.argv.slice(3), {
|
||||
boolean: ['debug'],
|
||||
});
|
||||
|
||||
const isVerbose = !!args.debug;
|
||||
|
||||
async function main() {
|
||||
const terraformClient = new TerraformClient({ isVerbose });
|
||||
|
||||
if (terraformClient.hasTerraformState()) {
|
||||
await terraformClient.destroyEnvironment();
|
||||
} else {
|
||||
await destroyUsingAz();
|
||||
}
|
||||
}
|
||||
|
||||
async function destroyUsingAz() {
|
||||
const resourcesResult =
|
||||
await $`az resource list --resource-group ${RESOURCE_GROUP_NAME} --query "[?tags.Id == 'N8nBenchmark'].{id:id, createdAt:tags.CreatedAt}" -o json`;
|
||||
|
||||
const resources = JSON.parse(resourcesResult.stdout);
|
||||
|
||||
const resourcesToDelete = resources.map((resource) => resource.id);
|
||||
|
||||
if (resourcesToDelete.length === 0) {
|
||||
console.log('No resources found in the resource group.');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
await deleteResources(resourcesToDelete);
|
||||
}
|
||||
|
||||
async function deleteResources(resourceIds) {
|
||||
// We don't know the order in which resources should be deleted.
|
||||
// Here's a poor person's approach to try deletion until all complete
|
||||
const MAX_ITERATIONS = 100;
|
||||
let i = 0;
|
||||
const toDelete = [...resourceIds];
|
||||
|
||||
console.log(`Deleting ${resourceIds.length} resources...`);
|
||||
while (toDelete.length > 0) {
|
||||
const resourceId = toDelete.shift();
|
||||
const deleted = await deleteById(resourceId);
|
||||
if (!deleted) {
|
||||
toDelete.push(resourceId);
|
||||
}
|
||||
|
||||
if (i++ > MAX_ITERATIONS) {
|
||||
console.log(
|
||||
`Max iterations reached. Exiting. Could not delete ${toDelete.length} resources.`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function deleteById(id) {
|
||||
try {
|
||||
await $`az resource delete --ids ${id}`;
|
||||
return true;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('An error occurred destroying cloud env:');
|
||||
console.error(error);
|
||||
|
||||
process.exit(1);
|
||||
});
|
||||
36
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/provision-cloud-env 2.mjs
Executable file
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env zx
|
||||
/**
|
||||
* Provisions the cloud benchmark environment
|
||||
*
|
||||
* NOTE: Must be run in the root of the package.
|
||||
*/
|
||||
// @ts-check
|
||||
import { which, minimist } from 'zx';
|
||||
import { TerraformClient } from './clients/terraform-client.mjs';
|
||||
|
||||
const args = minimist(process.argv.slice(3), {
|
||||
boolean: ['debug'],
|
||||
});
|
||||
|
||||
const isVerbose = !!args.debug;
|
||||
|
||||
export async function provision() {
|
||||
await ensureDependencies();
|
||||
|
||||
const terraformClient = new TerraformClient({
|
||||
isVerbose,
|
||||
});
|
||||
|
||||
await terraformClient.provisionEnvironment();
|
||||
}
|
||||
|
||||
async function ensureDependencies() {
|
||||
await which('terraform');
|
||||
}
|
||||
|
||||
provision().catch((error) => {
|
||||
console.error('An error occurred while provisioning cloud env:');
|
||||
console.error(error);
|
||||
|
||||
process.exit(1);
|
||||
});
|
||||
186
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/run 2.mjs
Executable file
@@ -0,0 +1,186 @@
|
||||
#!/usr/bin/env zx
|
||||
/**
|
||||
* Script to run benchmarks either on the cloud benchmark environment or locally.
|
||||
* The cloud environment needs to be provisioned using Terraform before running the benchmarks.
|
||||
*
|
||||
* NOTE: Must be run in the root of the package.
|
||||
*/
|
||||
// @ts-check
|
||||
import fs from 'fs';
|
||||
import minimist from 'minimist';
|
||||
import path from 'path';
|
||||
import { runInCloud } from './run-in-cloud.mjs';
|
||||
import { runLocally } from './run-locally.mjs';
|
||||
|
||||
const paths = {
|
||||
n8nSetupsDir: path.join(path.resolve('scripts'), 'n8n-setups'),
|
||||
};
|
||||
|
||||
async function main() {
|
||||
const config = await parseAndValidateConfig();
|
||||
|
||||
const n8nSetupsToUse =
|
||||
config.n8nSetupToUse === 'all' ? readAvailableN8nSetups() : [config.n8nSetupToUse];
|
||||
|
||||
console.log('Using n8n tag', config.n8nTag);
|
||||
console.log('Using benchmark cli tag', config.benchmarkTag);
|
||||
console.log('Using environment', config.env);
|
||||
console.log('Using n8n setups', n8nSetupsToUse.join(', '));
|
||||
console.log('');
|
||||
|
||||
if (config.env === 'cloud') {
|
||||
await runInCloud({
|
||||
benchmarkTag: config.benchmarkTag,
|
||||
isVerbose: config.isVerbose,
|
||||
k6ApiToken: config.k6ApiToken,
|
||||
resultWebhookUrl: config.resultWebhookUrl,
|
||||
resultWebhookAuthHeader: config.resultWebhookAuthHeader,
|
||||
n8nLicenseCert: config.n8nLicenseCert,
|
||||
n8nTag: config.n8nTag,
|
||||
n8nSetupsToUse,
|
||||
vus: config.vus,
|
||||
duration: config.duration,
|
||||
});
|
||||
} else if (config.env === 'local') {
|
||||
await runLocally({
|
||||
benchmarkTag: config.benchmarkTag,
|
||||
isVerbose: config.isVerbose,
|
||||
k6ApiToken: config.k6ApiToken,
|
||||
resultWebhookUrl: config.resultWebhookUrl,
|
||||
resultWebhookAuthHeader: config.resultWebhookAuthHeader,
|
||||
n8nLicenseCert: config.n8nLicenseCert,
|
||||
n8nTag: config.n8nTag,
|
||||
runDir: config.runDir,
|
||||
n8nSetupsToUse,
|
||||
vus: config.vus,
|
||||
duration: config.duration,
|
||||
});
|
||||
} else {
|
||||
console.error('Invalid env:', config.env);
|
||||
printUsage();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function readAvailableN8nSetups() {
|
||||
const setups = fs.readdirSync(paths.n8nSetupsDir);
|
||||
|
||||
return setups;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {Object} Config
|
||||
* @property {boolean} isVerbose
|
||||
* @property {'cloud' | 'local'} env
|
||||
* @property {string} n8nSetupToUse
|
||||
* @property {string} n8nTag
|
||||
* @property {string} benchmarkTag
|
||||
* @property {string} [k6ApiToken]
|
||||
* @property {string} [resultWebhookUrl]
|
||||
* @property {string} [resultWebhookAuthHeader]
|
||||
* @property {string} [n8nLicenseCert]
|
||||
* @property {string} [runDir]
|
||||
* @property {string} [vus]
|
||||
* @property {string} [duration]
|
||||
*
|
||||
* @returns {Promise<Config>}
|
||||
*/
|
||||
async function parseAndValidateConfig() {
|
||||
const args = minimist(process.argv.slice(3), {
|
||||
boolean: ['debug', 'help'],
|
||||
});
|
||||
|
||||
if (args.help) {
|
||||
printUsage();
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const n8nSetupToUse = await getAndValidateN8nSetup(args);
|
||||
const isVerbose = args.debug || false;
|
||||
const n8nTag = args.n8nTag || process.env.N8N_DOCKER_TAG || 'latest';
|
||||
const benchmarkTag = args.benchmarkTag || process.env.BENCHMARK_DOCKER_TAG || 'latest';
|
||||
const k6ApiToken = args.k6ApiToken || process.env.K6_API_TOKEN || undefined;
|
||||
const resultWebhookUrl =
|
||||
args.resultWebhookUrl || process.env.BENCHMARK_RESULT_WEBHOOK_URL || undefined;
|
||||
const resultWebhookAuthHeader =
|
||||
args.resultWebhookAuthHeader || process.env.BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER || undefined;
|
||||
const n8nLicenseCert = args.n8nLicenseCert || process.env.N8N_LICENSE_CERT || undefined;
|
||||
const runDir = args.runDir || undefined;
|
||||
const env = args.env || 'local';
|
||||
const vus = args.vus;
|
||||
const duration = args.duration;
|
||||
|
||||
if (!env) {
|
||||
printUsage();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return {
|
||||
isVerbose,
|
||||
env,
|
||||
n8nSetupToUse,
|
||||
n8nTag,
|
||||
benchmarkTag,
|
||||
k6ApiToken,
|
||||
resultWebhookUrl,
|
||||
resultWebhookAuthHeader,
|
||||
n8nLicenseCert,
|
||||
runDir,
|
||||
vus,
|
||||
duration,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {minimist.ParsedArgs} args
|
||||
*/
|
||||
async function getAndValidateN8nSetup(args) {
|
||||
// Last parameter is the n8n setup to use
|
||||
const n8nSetupToUse = args._[args._.length - 1];
|
||||
if (!n8nSetupToUse || n8nSetupToUse === 'all') {
|
||||
return 'all';
|
||||
}
|
||||
|
||||
const availableSetups = readAvailableN8nSetups();
|
||||
|
||||
if (!availableSetups.includes(n8nSetupToUse)) {
|
||||
printUsage();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return n8nSetupToUse;
|
||||
}
|
||||
|
||||
function printUsage() {
|
||||
const availableSetups = readAvailableN8nSetups();
|
||||
|
||||
console.log(`Usage: zx scripts/${path.basename(__filename)} [n8n setup name]`);
|
||||
console.log(` eg: zx scripts/${path.basename(__filename)}`);
|
||||
console.log('');
|
||||
console.log('Options:');
|
||||
console.log(
|
||||
` [n8n setup name] Against which n8n setup to run the benchmarks. One of: ${['all', ...availableSetups].join(', ')}. Default is all`,
|
||||
);
|
||||
console.log(
|
||||
' --env Env where to run the benchmarks. Either cloud or local. Default is local.',
|
||||
);
|
||||
console.log(' --debug Enable verbose output');
|
||||
console.log(' --n8nTag Docker tag for n8n image. Default is latest');
|
||||
console.log(' --benchmarkTag Docker tag for benchmark cli image. Default is latest');
|
||||
console.log(' --vus How many concurrent requests to make');
|
||||
console.log(' --duration Test duration, e.g. 1m or 30s');
|
||||
console.log(
|
||||
' --k6ApiToken API token for k6 cloud. Default is read from K6_API_TOKEN env var. If omitted, k6 cloud will not be used',
|
||||
);
|
||||
console.log(
|
||||
' --runDir Directory to share with the n8n container for storing data. Needed only for local runs.',
|
||||
);
|
||||
console.log('');
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('An error occurred while running the benchmarks:');
|
||||
console.error(error);
|
||||
|
||||
process.exit(1);
|
||||
});
|
||||
158
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/run-for-n8n-setup 2.mjs
Executable file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env zx
|
||||
/**
|
||||
* This script runs the benchmarks for the given n8n setup.
|
||||
*/
|
||||
// @ts-check
|
||||
import path from 'path';
|
||||
import { $, argv, fs } from 'zx';
|
||||
import { DockerComposeClient } from './clients/docker-compose-client.mjs';
|
||||
import { flagsObjectToCliArgs } from './utils/flags.mjs';
|
||||
|
||||
const paths = {
|
||||
n8nSetupsDir: path.join(__dirname, 'n8n-setups'),
|
||||
mockApiDataPath: path.join(__dirname, 'mock-api'),
|
||||
};
|
||||
|
||||
const N8N_ENCRYPTION_KEY = 'very-secret-encryption-key';
|
||||
|
||||
async function main() {
|
||||
const [n8nSetupToUse] = argv._;
|
||||
validateN8nSetup(n8nSetupToUse);
|
||||
|
||||
const composeFilePath = path.join(paths.n8nSetupsDir, n8nSetupToUse);
|
||||
const setupScriptPath = path.join(paths.n8nSetupsDir, n8nSetupToUse, 'setup.mjs');
|
||||
const n8nTag = argv.n8nDockerTag || process.env.N8N_DOCKER_TAG || 'latest';
|
||||
const benchmarkTag = argv.benchmarkDockerTag || process.env.BENCHMARK_DOCKER_TAG || 'latest';
|
||||
const k6ApiToken = argv.k6ApiToken || process.env.K6_API_TOKEN || undefined;
|
||||
const resultWebhookUrl =
|
||||
argv.resultWebhookUrl || process.env.BENCHMARK_RESULT_WEBHOOK_URL || undefined;
|
||||
const resultWebhookAuthHeader =
|
||||
argv.resultWebhookAuthHeader || process.env.BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER || undefined;
|
||||
const baseRunDir = argv.runDir || process.env.RUN_DIR || '/n8n';
|
||||
const n8nLicenseCert = argv.n8nLicenseCert || process.env.N8N_LICENSE_CERT || undefined;
|
||||
const n8nLicenseActivationKey = process.env.N8N_LICENSE_ACTIVATION_KEY || undefined;
|
||||
const n8nLicenseTenantId = argv.n8nLicenseTenantId || process.env.N8N_LICENSE_TENANT_ID || '1';
|
||||
const envTag = argv.env || 'local';
|
||||
const vus = argv.vus;
|
||||
const duration = argv.duration;
|
||||
|
||||
const hasN8nLicense = !!n8nLicenseCert || !!n8nLicenseActivationKey;
|
||||
if (n8nSetupToUse === 'scaling-multi-main' && !hasN8nLicense) {
|
||||
console.error(
|
||||
'n8n license is required to run the multi-main scaling setup. Please provide N8N_LICENSE_CERT or N8N_LICENSE_ACTIVATION_KEY (and N8N_LICENSE_TENANT_ID if needed)',
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(baseRunDir)) {
|
||||
console.error(
|
||||
`The run directory "${baseRunDir}" does not exist. Please specify a valid directory using --runDir`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const runDir = path.join(baseRunDir, n8nSetupToUse);
|
||||
fs.emptyDirSync(runDir);
|
||||
|
||||
const dockerComposeClient = new DockerComposeClient({
|
||||
$: $({
|
||||
cwd: composeFilePath,
|
||||
verbose: true,
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
N8N_VERSION: n8nTag,
|
||||
N8N_LICENSE_CERT: n8nLicenseCert,
|
||||
N8N_LICENSE_ACTIVATION_KEY: n8nLicenseActivationKey,
|
||||
N8N_LICENSE_TENANT_ID: n8nLicenseTenantId,
|
||||
N8N_ENCRYPTION_KEY,
|
||||
BENCHMARK_VERSION: benchmarkTag,
|
||||
K6_API_TOKEN: k6ApiToken,
|
||||
BENCHMARK_RESULT_WEBHOOK_URL: resultWebhookUrl,
|
||||
BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER: resultWebhookAuthHeader,
|
||||
RUN_DIR: runDir,
|
||||
MOCK_API_DATA_PATH: paths.mockApiDataPath,
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
// Run the setup script if it exists
|
||||
if (fs.existsSync(setupScriptPath)) {
|
||||
const setupScript = await import(setupScriptPath);
|
||||
await setupScript.setup({ runDir });
|
||||
}
|
||||
|
||||
try {
|
||||
await dockerComposeClient.$('up', '-d', '--remove-orphans', 'n8n');
|
||||
|
||||
const tags = Object.entries({
|
||||
Env: envTag,
|
||||
N8nVersion: n8nTag,
|
||||
N8nSetup: n8nSetupToUse,
|
||||
})
|
||||
.map(([key, value]) => `${key}=${value}`)
|
||||
.join(',');
|
||||
|
||||
const cliArgs = flagsObjectToCliArgs({
|
||||
scenarioNamePrefix: n8nSetupToUse,
|
||||
vus,
|
||||
duration,
|
||||
tags,
|
||||
});
|
||||
|
||||
await dockerComposeClient.$('run', 'benchmark', 'run', ...cliArgs);
|
||||
} catch (error) {
|
||||
console.error('An error occurred while running the benchmarks:');
|
||||
console.error(error.message);
|
||||
console.error('');
|
||||
await printContainerStatus(dockerComposeClient);
|
||||
} finally {
|
||||
await dumpLogs(dockerComposeClient);
|
||||
await dockerComposeClient.$('down');
|
||||
}
|
||||
}
|
||||
|
||||
async function printContainerStatus(dockerComposeClient) {
|
||||
console.error('Container statuses:');
|
||||
await dockerComposeClient.$('ps', '-a');
|
||||
}
|
||||
|
||||
async function dumpLogs(dockerComposeClient) {
|
||||
console.info('Container logs:');
|
||||
await dockerComposeClient.$('logs');
|
||||
}
|
||||
|
||||
function printUsage() {
|
||||
const availableSetups = getAllN8nSetups();
|
||||
console.log('Usage: zx runForN8nSetup.mjs --runDir /path/for/n8n/data <n8n setup to use>');
|
||||
console.log(` eg: zx runForN8nSetup.mjs --runDir /path/for/n8n/data ${availableSetups[0]}`);
|
||||
console.log('');
|
||||
console.log('Flags:');
|
||||
console.log(
|
||||
' --runDir <path> Directory to share with the n8n container for storing data. Default is /n8n',
|
||||
);
|
||||
console.log(' --n8nDockerTag <tag> Docker tag for n8n image. Default is latest');
|
||||
console.log(
|
||||
' --benchmarkDockerTag <tag> Docker tag for benchmark cli image. Default is latest',
|
||||
);
|
||||
console.log(' --k6ApiToken <token> K6 API token to upload the results');
|
||||
console.log('');
|
||||
console.log('Available setups:');
|
||||
console.log(availableSetups.join(', '));
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function getAllN8nSetups() {
|
||||
return fs.readdirSync(paths.n8nSetupsDir);
|
||||
}
|
||||
|
||||
function validateN8nSetup(givenSetup) {
|
||||
const availableSetups = getAllN8nSetups();
|
||||
if (!availableSetups.includes(givenSetup)) {
|
||||
printUsage();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
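The runner scripts above build their CLI arguments with flagsObjectToCliArgs from ./utils/flags.mjs, which is not included in this diff. A minimal sketch of the behaviour the call sites appear to rely on (undefined values are skipped, everything else becomes a --key=value pair); this is an illustration under that assumption, not the actual implementation:

// Illustrative sketch only; the real ./utils/flags.mjs may differ.
export function flagsObjectToCliArgs(flags) {
	return Object.entries(flags)
		.filter(([, value]) => value !== undefined && value !== null)
		.map(([key, value]) => `--${key}=${value}`);
}

// e.g. flagsObjectToCliArgs({ env: 'local', vus: 5, duration: undefined })
// -> ['--env=local', '--vus=5']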
154
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/run-in-cloud 2.mjs
Executable file
@@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env zx
|
||||
/**
|
||||
* Script to run benchmarks on the cloud benchmark environment.
|
||||
* This script will:
|
||||
* 1. Provision a benchmark environment using Terraform.
|
||||
* 2. Run the benchmarks on the VM.
|
||||
* 3. Destroy the cloud environment.
|
||||
*
|
||||
* NOTE: Must be run in the root of the package.
|
||||
*/
|
||||
// @ts-check
|
||||
import { sleep, which, $, tmpdir } from 'zx';
|
||||
import path from 'path';
|
||||
import { SshClient } from './clients/ssh-client.mjs';
|
||||
import { TerraformClient } from './clients/terraform-client.mjs';
|
||||
import { flagsObjectToCliArgs } from './utils/flags.mjs';
|
||||
|
||||
/**
|
||||
* @typedef {Object} BenchmarkEnv
|
||||
* @property {string} vmName
|
||||
* @property {string} ip
|
||||
* @property {string} sshUsername
|
||||
* @property {string} sshPrivateKeyPath
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} Config
|
||||
* @property {boolean} isVerbose
|
||||
* @property {string[]} n8nSetupsToUse
|
||||
* @property {string} n8nTag
|
||||
* @property {string} benchmarkTag
|
||||
* @property {string} [k6ApiToken]
|
||||
* @property {string} [resultWebhookUrl]
|
||||
* @property {string} [resultWebhookAuthHeader]
|
||||
* @property {string} [n8nLicenseCert]
|
||||
* @property {string} [vus]
|
||||
* @property {string} [duration]
|
||||
*
|
||||
* @param {Config} config
|
||||
*/
|
||||
export async function runInCloud(config) {
|
||||
await ensureDependencies();
|
||||
|
||||
const terraformClient = new TerraformClient({
|
||||
isVerbose: config.isVerbose,
|
||||
});
|
||||
|
||||
const benchmarkEnv = await terraformClient.getTerraformOutputs();
|
||||
|
||||
await runBenchmarksOnVm(config, benchmarkEnv);
|
||||
}
|
||||
|
||||
async function ensureDependencies() {
|
||||
await which('terraform');
|
||||
await which('az');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Config} config
|
||||
* @param {BenchmarkEnv} benchmarkEnv
|
||||
*/
|
||||
async function runBenchmarksOnVm(config, benchmarkEnv) {
|
||||
console.log(`Setting up the environment...`);
|
||||
|
||||
const sshClient = new SshClient({
|
||||
ip: benchmarkEnv.ip,
|
||||
username: benchmarkEnv.sshUsername,
|
||||
privateKeyPath: benchmarkEnv.sshPrivateKeyPath,
|
||||
verbose: config.isVerbose,
|
||||
});
|
||||
|
||||
await ensureVmIsReachable(sshClient);
|
||||
|
||||
const scriptsDir = await transferScriptsToVm(sshClient, config);
|
||||
|
||||
// Bootstrap the environment with dependencies
|
||||
console.log('Running bootstrap script...');
|
||||
const bootstrapScriptPath = path.join(scriptsDir, 'bootstrap.sh');
|
||||
await sshClient.ssh(`chmod a+x ${bootstrapScriptPath} && ${bootstrapScriptPath}`);
|
||||
|
||||
// Give some time for the VM to be ready
|
||||
await sleep(1000);
|
||||
|
||||
for (const n8nSetup of config.n8nSetupsToUse) {
|
||||
await runBenchmarkForN8nSetup({
|
||||
config,
|
||||
sshClient,
|
||||
scriptsDir,
|
||||
n8nSetup,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{ config: Config; sshClient: any; scriptsDir: string; n8nSetup: string; }} opts
|
||||
*/
|
||||
async function runBenchmarkForN8nSetup({ config, sshClient, scriptsDir, n8nSetup }) {
|
||||
console.log(`Running benchmarks for ${n8nSetup}...`);
|
||||
const runScriptPath = path.join(scriptsDir, 'run-for-n8n-setup.mjs');
|
||||
|
||||
const cliArgs = flagsObjectToCliArgs({
|
||||
n8nDockerTag: config.n8nTag,
|
||||
benchmarkDockerTag: config.benchmarkTag,
|
||||
k6ApiToken: config.k6ApiToken,
|
||||
resultWebhookUrl: config.resultWebhookUrl,
|
||||
resultWebhookAuthHeader: config.resultWebhookAuthHeader,
|
||||
n8nLicenseCert: config.n8nLicenseCert,
|
||||
vus: config.vus,
|
||||
duration: config.duration,
|
||||
env: 'cloud',
|
||||
});
|
||||
|
||||
const flagsString = cliArgs.join(' ');
|
||||
|
||||
await sshClient.ssh(`npx zx ${runScriptPath} ${flagsString} ${n8nSetup}`, {
|
||||
// Test run should always log its output
|
||||
verbose: true,
|
||||
});
|
||||
}
|
||||
|
||||
async function ensureVmIsReachable(sshClient) {
|
||||
try {
|
||||
await sshClient.ssh('echo "VM is reachable"');
|
||||
} catch (error) {
|
||||
console.error(`VM is not reachable: ${error.message}`);
|
||||
console.error(
|
||||
`Did you provision the cloud environment first with 'pnpm provision-cloud-env'? You can also run the benchmarks locally with 'pnpm run benchmark-locally'.`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns Path where the scripts are located on the VM
|
||||
*/
|
||||
async function transferScriptsToVm(sshClient, config) {
|
||||
const cwd = process.cwd();
|
||||
const scriptsDir = path.resolve(cwd, './scripts');
|
||||
const tarFilename = 'scripts.tar.gz';
|
||||
const scriptsTarPath = path.join(tmpdir('n8n-benchmark'), tarFilename);
|
||||
|
||||
const $$ = $({ verbose: config.isVerbose });
|
||||
|
||||
// Compress the scripts folder
|
||||
await $$`tar -czf ${scriptsTarPath} ${scriptsDir} -C ${cwd} ./scripts`;
|
||||
|
||||
// Transfer the scripts to the VM
|
||||
await sshClient.scp(scriptsTarPath, `~/${tarFilename}`);
|
||||
|
||||
// Extract the scripts on the VM
|
||||
await sshClient.ssh(`tar -xzf ~/${tarFilename}`);
|
||||
|
||||
return '~/scripts';
|
||||
}
|
||||
71
n8n-n8n-1.109.2/packages/@n8n/benchmark/scripts/run-locally 2.mjs
Executable file
@@ -0,0 +1,71 @@
#!/usr/bin/env zx
/**
 * Script to run the benchmarks locally against the selected n8n setups.
 * For each setup it calls run-for-n8n-setup.mjs, which starts the setup
 * with docker compose and runs the benchmark cli against it.
 *
 * NOTE: Must be run in the root of the package.
 */
// @ts-check
import { $ } from 'zx';
import path from 'path';
import { flagsObjectToCliArgs } from './utils/flags.mjs';

/**
 * @typedef {Object} BenchmarkEnv
 * @property {string} vmName
 */

const paths = {
	scriptsDir: path.join(path.resolve('scripts')),
};

/**
 * @typedef {Object} Config
 * @property {boolean} isVerbose
 * @property {string[]} n8nSetupsToUse
 * @property {string} n8nTag
 * @property {string} benchmarkTag
 * @property {string} [runDir]
 * @property {string} [k6ApiToken]
 * @property {string} [resultWebhookUrl]
 * @property {string} [resultWebhookAuthHeader]
 * @property {string} [n8nLicenseCert]
 * @property {string} [vus]
 * @property {string} [duration]
 *
 * @param {Config} config
 */
export async function runLocally(config) {
	const runScriptPath = path.join(paths.scriptsDir, 'run-for-n8n-setup.mjs');

	const cliArgs = flagsObjectToCliArgs({
		n8nDockerTag: config.n8nTag,
		benchmarkDockerTag: config.benchmarkTag,
		runDir: config.runDir,
		vus: config.vus,
		duration: config.duration,
		env: 'local',
	});

	try {
		for (const n8nSetup of config.n8nSetupsToUse) {
			console.log(`Running benchmarks for n8n setup: ${n8nSetup}`);

			await $({
				env: {
					...process.env,
					K6_API_TOKEN: config.k6ApiToken,
					BENCHMARK_RESULT_WEBHOOK_URL: config.resultWebhookUrl,
					BENCHMARK_RESULT_WEBHOOK_AUTH_HEADER: config.resultWebhookAuthHeader,
					N8N_LICENSE_CERT: config.n8nLicenseCert,
				},
			})`npx ${runScriptPath} ${cliArgs} ${n8nSetup}`;
		}
	} catch (error) {
		console.error('An error occurred while running the benchmarks:');
		console.error(error);
	}
}
144
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/client-oauth2 2.ts
Executable file
@@ -0,0 +1,144 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
import axios from 'axios';
|
||||
import type { AxiosRequestConfig, AxiosResponse } from 'axios';
|
||||
import { Agent } from 'https';
|
||||
import * as qs from 'querystring';
|
||||
|
||||
import type { ClientOAuth2TokenData } from './client-oauth2-token';
|
||||
import { ClientOAuth2Token } from './client-oauth2-token';
|
||||
import { CodeFlow } from './code-flow';
|
||||
import { CredentialsFlow } from './credentials-flow';
|
||||
import type { Headers, OAuth2AccessTokenErrorResponse } from './types';
|
||||
import { getAuthError } from './utils';
|
||||
|
||||
export interface ClientOAuth2RequestObject {
|
||||
url: string;
|
||||
method: 'DELETE' | 'GET' | 'HEAD' | 'PATCH' | 'POST' | 'PUT';
|
||||
body?: Record<string, any>;
|
||||
query?: qs.ParsedUrlQuery;
|
||||
headers?: Headers;
|
||||
ignoreSSLIssues?: boolean;
|
||||
}
|
||||
|
||||
export interface ClientOAuth2Options {
|
||||
clientId: string;
|
||||
clientSecret?: string;
|
||||
accessTokenUri: string;
|
||||
authentication?: 'header' | 'body';
|
||||
authorizationUri?: string;
|
||||
redirectUri?: string;
|
||||
scopes?: string[];
|
||||
scopesSeparator?: ',' | ' ';
|
||||
authorizationGrants?: string[];
|
||||
state?: string;
|
||||
additionalBodyProperties?: Record<string, any>;
|
||||
body?: Record<string, any>;
|
||||
query?: qs.ParsedUrlQuery;
|
||||
ignoreSSLIssues?: boolean;
|
||||
}
|
||||
|
||||
export class ResponseError extends Error {
|
||||
constructor(
|
||||
readonly status: number,
|
||||
readonly body: unknown,
|
||||
readonly code = 'ESTATUS',
|
||||
readonly message = `HTTP status ${status}`,
|
||||
) {
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
|
||||
const sslIgnoringAgent = new Agent({ rejectUnauthorized: false });
|
||||
|
||||
/**
|
||||
* Construct an object that can handle the multiple OAuth 2.0 flows.
|
||||
*/
|
||||
export class ClientOAuth2 {
|
||||
code: CodeFlow;
|
||||
|
||||
credentials: CredentialsFlow;
|
||||
|
||||
constructor(readonly options: ClientOAuth2Options) {
|
||||
this.code = new CodeFlow(this);
|
||||
this.credentials = new CredentialsFlow(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new token from existing data.
|
||||
*/
|
||||
createToken(data: ClientOAuth2TokenData, type?: string): ClientOAuth2Token {
|
||||
return new ClientOAuth2Token(this, {
|
||||
...data,
|
||||
...(typeof type === 'string' ? { token_type: type } : type),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Request an access token from the OAuth2 server.
|
||||
*
|
||||
* @throws {ResponseError} If the response is an unexpected status code.
|
||||
* @throws {AuthError} If the response is an authentication error.
|
||||
*/
|
||||
async accessTokenRequest(options: ClientOAuth2RequestObject): Promise<ClientOAuth2TokenData> {
|
||||
let url = options.url;
|
||||
const query = qs.stringify(options.query);
|
||||
|
||||
if (query) {
|
||||
url += (url.indexOf('?') === -1 ? '?' : '&') + query;
|
||||
}
|
||||
|
||||
const requestConfig: AxiosRequestConfig = {
|
||||
url,
|
||||
method: options.method,
|
||||
data: qs.stringify(options.body),
|
||||
headers: options.headers,
|
||||
transformResponse: (res: unknown) => res,
|
||||
// Axios rejects the promise by default for all status codes 4xx.
|
||||
// We override this to reject promises only on 5xxs
|
||||
validateStatus: (status) => status < 500,
|
||||
};
|
||||
|
||||
if (options.ignoreSSLIssues) {
|
||||
requestConfig.httpsAgent = sslIgnoringAgent;
|
||||
}
|
||||
|
||||
const response = await axios.request(requestConfig);
|
||||
|
||||
if (response.status >= 400) {
|
||||
const body = this.parseResponseBody<OAuth2AccessTokenErrorResponse>(response);
|
||||
const authErr = getAuthError(body);
|
||||
|
||||
if (authErr) throw authErr;
|
||||
else throw new ResponseError(response.status, response.data);
|
||||
}
|
||||
|
||||
if (response.status >= 300) {
|
||||
throw new ResponseError(response.status, response.data);
|
||||
}
|
||||
|
||||
return this.parseResponseBody<ClientOAuth2TokenData>(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to parse response body based on the content type.
|
||||
*/
|
||||
private parseResponseBody<T extends object>(response: AxiosResponse<unknown>): T {
|
||||
const contentType = (response.headers['content-type'] as string) ?? '';
|
||||
const body = response.data as string;
|
||||
|
||||
if (contentType.startsWith('application/json')) {
|
||||
return JSON.parse(body) as T;
|
||||
}
|
||||
|
||||
if (contentType.startsWith('application/x-www-form-urlencoded')) {
|
||||
return qs.parse(body) as T;
|
||||
}
|
||||
|
||||
throw new ResponseError(
|
||||
response.status,
|
||||
body,
|
||||
undefined,
|
||||
`Unsupported content type: ${contentType}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
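A minimal usage sketch of the class above, mirroring what the test files later in this diff exercise; the endpoints and credentials are placeholders, and the relative import assumes the snippet sits next to the package sources:

import { ClientOAuth2 } from './client-oauth2';

// Placeholder endpoints and credentials, for illustration only.
const client = new ClientOAuth2({
	clientId: 'abc',
	clientSecret: '123',
	accessTokenUri: 'https://mock.auth.service/login/oauth/access_token',
	authentication: 'header',
});

// Client-credentials grant: POSTs grant_type=client_credentials to accessTokenUri
// and wraps the parsed response in a ClientOAuth2Token.
const token = await client.credentials.getToken();
console.log(token.accessToken);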
113
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/client-oauth2-token 2.ts
Executable file
@@ -0,0 +1,113 @@
|
||||
import * as a from 'node:assert';
|
||||
|
||||
import type { ClientOAuth2, ClientOAuth2Options, ClientOAuth2RequestObject } from './client-oauth2';
|
||||
import { DEFAULT_HEADERS } from './constants';
|
||||
import { auth, expects, getRequestOptions } from './utils';
|
||||
|
||||
export interface ClientOAuth2TokenData extends Record<string, string | undefined> {
|
||||
token_type?: string | undefined;
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
expires_in?: string;
|
||||
scope?: string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* General purpose client token generator.
|
||||
*/
|
||||
export class ClientOAuth2Token {
|
||||
readonly tokenType?: string;
|
||||
|
||||
readonly accessToken: string;
|
||||
|
||||
readonly refreshToken: string;
|
||||
|
||||
private expires: Date;
|
||||
|
||||
constructor(
|
||||
readonly client: ClientOAuth2,
|
||||
readonly data: ClientOAuth2TokenData,
|
||||
) {
|
||||
this.tokenType = data.token_type?.toLowerCase() ?? 'bearer';
|
||||
this.accessToken = data.access_token;
|
||||
this.refreshToken = data.refresh_token;
|
||||
|
||||
this.expires = new Date();
|
||||
this.expires.setSeconds(this.expires.getSeconds() + Number(data.expires_in));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sign a standardized request object with user authentication information.
|
||||
*/
|
||||
sign(requestObject: ClientOAuth2RequestObject): ClientOAuth2RequestObject {
|
||||
if (!this.accessToken) {
|
||||
throw new Error('Unable to sign without access token');
|
||||
}
|
||||
|
||||
requestObject.headers = requestObject.headers ?? {};
|
||||
|
||||
if (this.tokenType === 'bearer') {
|
||||
requestObject.headers.Authorization = 'Bearer ' + this.accessToken;
|
||||
} else {
|
||||
const parts = requestObject.url.split('#');
|
||||
const token = 'access_token=' + this.accessToken;
|
||||
const url = parts[0].replace(/[?&]access_token=[^&#]/, '');
|
||||
const fragment = parts[1] ? '#' + parts[1] : '';
|
||||
|
||||
// Prepend the correct query string parameter to the url.
|
||||
requestObject.url = url + (url.indexOf('?') > -1 ? '&' : '?') + token + fragment;
|
||||
|
||||
// Attempt to avoid storing the url in proxies, since the access token
|
||||
// is exposed in the query parameters.
|
||||
requestObject.headers.Pragma = 'no-store';
|
||||
requestObject.headers['Cache-Control'] = 'no-store';
|
||||
}
|
||||
|
||||
return requestObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh a user access token with the refresh token.
|
||||
* As in RFC 6749 Section 6: https://www.rfc-editor.org/rfc/rfc6749.html#section-6
|
||||
*/
|
||||
async refresh(opts?: ClientOAuth2Options): Promise<ClientOAuth2Token> {
|
||||
const options = { ...this.client.options, ...opts };
|
||||
|
||||
expects(options, 'clientSecret');
|
||||
a.ok(this.refreshToken, 'refreshToken is required');
|
||||
|
||||
const { clientId, clientSecret } = options;
|
||||
const headers = { ...DEFAULT_HEADERS };
|
||||
const body: Record<string, string> = {
|
||||
refresh_token: this.refreshToken,
|
||||
grant_type: 'refresh_token',
|
||||
};
|
||||
|
||||
if (options.authentication === 'body') {
|
||||
body.client_id = clientId;
|
||||
body.client_secret = clientSecret;
|
||||
} else {
|
||||
headers.Authorization = auth(clientId, clientSecret);
|
||||
}
|
||||
|
||||
const requestOptions = getRequestOptions(
|
||||
{
|
||||
url: options.accessTokenUri,
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const responseData = await this.client.accessTokenRequest(requestOptions);
|
||||
return this.client.createToken({ ...this.data, ...responseData });
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the token has expired.
|
||||
*/
|
||||
expired(): boolean {
|
||||
return Date.now() > this.expires.getTime();
|
||||
}
|
||||
}
|
||||
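A short sketch of how a ClientOAuth2Token is typically consumed; the client setup and URLs are placeholders:

import { ClientOAuth2 } from './client-oauth2';

const client = new ClientOAuth2({
	clientId: 'abc',
	clientSecret: '123',
	accessTokenUri: 'https://mock.auth.service/login/oauth/access_token',
});

let token = await client.credentials.getToken();

// refresh() posts a refresh_token grant to accessTokenUri and returns a fresh token.
if (token.expired()) {
	token = await token.refresh();
}

// For bearer tokens, sign() adds an `Authorization: Bearer <accessToken>` header;
// otherwise it appends access_token=... to the query string and sets no-store cache headers.
const signed = token.sign({ method: 'GET', url: 'https://api.example.com/user' });
console.log(signed.headers);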
123
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/code-flow 2.ts
Executable file
@@ -0,0 +1,123 @@
|
||||
import * as qs from 'querystring';
|
||||
|
||||
import type { ClientOAuth2, ClientOAuth2Options } from './client-oauth2';
|
||||
import type { ClientOAuth2Token } from './client-oauth2-token';
|
||||
import { DEFAULT_HEADERS, DEFAULT_URL_BASE } from './constants';
|
||||
import { auth, expects, getAuthError, getRequestOptions } from './utils';
|
||||
|
||||
interface CodeFlowBody {
|
||||
code: string | string[];
|
||||
grant_type: 'authorization_code';
|
||||
redirect_uri?: string;
|
||||
client_id?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Support authorization code OAuth 2.0 grant.
|
||||
*
|
||||
* Reference: http://tools.ietf.org/html/rfc6749#section-4.1
|
||||
*/
|
||||
export class CodeFlow {
|
||||
constructor(private client: ClientOAuth2) {}
|
||||
|
||||
/**
|
||||
* Generate the uri for doing the first redirect.
|
||||
*/
|
||||
getUri(opts?: Partial<ClientOAuth2Options>): string {
|
||||
const options: ClientOAuth2Options = { ...this.client.options, ...opts };
|
||||
|
||||
// Check the required parameters are set.
|
||||
expects(options, 'clientId', 'authorizationUri');
|
||||
|
||||
const url = new URL(options.authorizationUri);
|
||||
|
||||
const queryParams = {
|
||||
...options.query,
|
||||
client_id: options.clientId,
|
||||
redirect_uri: options.redirectUri,
|
||||
response_type: 'code',
|
||||
state: options.state,
|
||||
...(options.scopes ? { scope: options.scopes.join(options.scopesSeparator ?? ' ') } : {}),
|
||||
};
|
||||
|
||||
for (const [key, value] of Object.entries(queryParams)) {
|
||||
if (value !== null && value !== undefined) {
|
||||
url.searchParams.append(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the code token from the redirected uri and make another request for
|
||||
* the user access token.
|
||||
*/
|
||||
async getToken(
|
||||
urlString: string,
|
||||
opts?: Partial<ClientOAuth2Options>,
|
||||
): Promise<ClientOAuth2Token> {
|
||||
const options: ClientOAuth2Options = { ...this.client.options, ...opts };
|
||||
expects(options, 'clientId', 'accessTokenUri');
|
||||
|
||||
const url = new URL(urlString, DEFAULT_URL_BASE);
|
||||
if (
|
||||
typeof options.redirectUri === 'string' &&
|
||||
typeof url.pathname === 'string' &&
|
||||
url.pathname !== new URL(options.redirectUri, DEFAULT_URL_BASE).pathname
|
||||
) {
|
||||
throw new TypeError('Redirected path should match configured path, but got: ' + url.pathname);
|
||||
}
|
||||
|
||||
if (!url.search?.substring(1)) {
|
||||
throw new TypeError(`Unable to process uri: ${urlString}`);
|
||||
}
|
||||
|
||||
const data =
|
||||
typeof url.search === 'string' ? qs.parse(url.search.substring(1)) : url.search || {};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
const error = getAuthError(data);
|
||||
if (error) throw error;
|
||||
|
||||
if (options.state && data.state !== options.state) {
|
||||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
||||
throw new TypeError(`Invalid state: ${data.state}`);
|
||||
}
|
||||
|
||||
// Check whether the response code is set.
|
||||
if (!data.code) {
|
||||
throw new TypeError('Missing code, unable to request token');
|
||||
}
|
||||
|
||||
const headers = { ...DEFAULT_HEADERS };
|
||||
const body: CodeFlowBody = {
|
||||
code: data.code,
|
||||
grant_type: 'authorization_code',
|
||||
redirect_uri: options.redirectUri,
|
||||
};
|
||||
|
||||
// `client_id`: REQUIRED, if the client is not authenticating with the
|
||||
// authorization server as described in Section 3.2.1.
|
||||
// Reference: https://tools.ietf.org/html/rfc6749#section-3.2.1
|
||||
if (options.clientSecret) {
|
||||
headers.Authorization = auth(options.clientId, options.clientSecret);
|
||||
} else {
|
||||
body.client_id = options.clientId;
|
||||
}
|
||||
|
||||
const requestOptions = getRequestOptions(
|
||||
{
|
||||
url: options.accessTokenUri,
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const responseData = await this.client.accessTokenRequest(requestOptions);
|
||||
return this.client.createToken(responseData);
|
||||
}
|
||||
}
|
||||
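Putting the two CodeFlow steps together; the URLs, state value, and callback query string below are placeholders:

import { ClientOAuth2 } from './client-oauth2';

const client = new ClientOAuth2({
	clientId: 'abc',
	clientSecret: '123',
	accessTokenUri: 'https://mock.auth.service/login/oauth/access_token',
	authorizationUri: 'https://mock.auth.service/login/oauth/authorize',
	redirectUri: 'http://example.com/auth/callback',
	scopes: ['notifications'],
	state: 'some-opaque-state',
});

// Step 1: send the user to the authorization server.
const redirectTo = client.code.getUri();

// Step 2: after the redirect back, exchange ?code=...&state=... for a token.
const token = await client.code.getToken(
	'http://example.com/auth/callback?code=abc123&state=some-opaque-state',
);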
62
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/constants 2.ts
Executable file
@@ -0,0 +1,62 @@
|
||||
import type { Headers } from './types';
|
||||
|
||||
export const DEFAULT_URL_BASE = 'https://example.org/';
|
||||
|
||||
/**
|
||||
* Default headers for executing OAuth 2.0 flows.
|
||||
*/
|
||||
export const DEFAULT_HEADERS: Headers = {
|
||||
Accept: 'application/json, application/x-www-form-urlencoded',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
};
|
||||
|
||||
/**
|
||||
* Format error response types to regular strings for displaying to clients.
|
||||
*
|
||||
* Reference: http://tools.ietf.org/html/rfc6749#section-4.1.2.1
|
||||
*/
|
||||
export const ERROR_RESPONSES: Record<string, string> = {
|
||||
invalid_request: [
|
||||
'The request is missing a required parameter, includes an',
|
||||
'invalid parameter value, includes a parameter more than',
|
||||
'once, or is otherwise malformed.',
|
||||
].join(' '),
|
||||
invalid_client: [
|
||||
'Client authentication failed (e.g., unknown client, no',
|
||||
'client authentication included, or unsupported',
|
||||
'authentication method).',
|
||||
].join(' '),
|
||||
invalid_grant: [
|
||||
'The provided authorization grant (e.g., authorization',
|
||||
'code, resource owner credentials) or refresh token is',
|
||||
'invalid, expired, revoked, does not match the redirection',
|
||||
'URI used in the authorization request, or was issued to',
|
||||
'another client.',
|
||||
].join(' '),
|
||||
unauthorized_client: [
|
||||
'The client is not authorized to request an authorization',
|
||||
'code using this method.',
|
||||
].join(' '),
|
||||
unsupported_grant_type: [
|
||||
'The authorization grant type is not supported by the',
|
||||
'authorization server.',
|
||||
].join(' '),
|
||||
access_denied: ['The resource owner or authorization server denied the request.'].join(' '),
|
||||
unsupported_response_type: [
|
||||
'The authorization server does not support obtaining',
|
||||
'an authorization code using this method.',
|
||||
].join(' '),
|
||||
invalid_scope: ['The requested scope is invalid, unknown, or malformed.'].join(' '),
|
||||
server_error: [
|
||||
'The authorization server encountered an unexpected',
|
||||
'condition that prevented it from fulfilling the request.',
|
||||
'(This error code is needed because a 500 Internal Server',
|
||||
'Error HTTP status code cannot be returned to the client',
|
||||
'via an HTTP redirect.)',
|
||||
].join(' '),
|
||||
temporarily_unavailable: [
|
||||
'The authorization server is currently unable to handle',
|
||||
'the request due to a temporary overloading or maintenance',
|
||||
'of the server.',
|
||||
].join(' '),
|
||||
};
|
||||
62
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/credentials-flow 2.ts
Executable file
@@ -0,0 +1,62 @@
|
||||
import type { ClientOAuth2 } from './client-oauth2';
|
||||
import type { ClientOAuth2Token } from './client-oauth2-token';
|
||||
import { DEFAULT_HEADERS } from './constants';
|
||||
import type { Headers } from './types';
|
||||
import { auth, expects, getRequestOptions } from './utils';
|
||||
|
||||
interface CredentialsFlowBody {
|
||||
client_id?: string;
|
||||
client_secret?: string;
|
||||
grant_type: 'client_credentials';
|
||||
scope?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Support client credentials OAuth 2.0 grant.
|
||||
*
|
||||
* Reference: http://tools.ietf.org/html/rfc6749#section-4.4
|
||||
*/
|
||||
export class CredentialsFlow {
|
||||
constructor(private client: ClientOAuth2) {}
|
||||
|
||||
/**
|
||||
* Request an access token using the client credentials.
|
||||
*/
|
||||
async getToken(): Promise<ClientOAuth2Token> {
|
||||
const options = { ...this.client.options };
|
||||
expects(options, 'clientId', 'clientSecret', 'accessTokenUri');
|
||||
|
||||
const headers: Headers = { ...DEFAULT_HEADERS };
|
||||
const body: CredentialsFlowBody = {
|
||||
grant_type: 'client_credentials',
|
||||
...(options.additionalBodyProperties ?? {}),
|
||||
};
|
||||
|
||||
if (options.scopes !== undefined) {
|
||||
body.scope = options.scopes.join(options.scopesSeparator ?? ' ');
|
||||
}
|
||||
|
||||
const clientId = options.clientId;
|
||||
const clientSecret = options.clientSecret;
|
||||
|
||||
if (options.authentication === 'body') {
|
||||
body.client_id = clientId;
|
||||
body.client_secret = clientSecret;
|
||||
} else {
|
||||
headers.Authorization = auth(clientId, clientSecret);
|
||||
}
|
||||
|
||||
const requestOptions = getRequestOptions(
|
||||
{
|
||||
url: options.accessTokenUri,
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const responseData = await this.client.accessTokenRequest(requestOptions);
|
||||
return this.client.createToken(responseData);
|
||||
}
|
||||
}
|
||||
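The authentication option above controls where the client credentials travel; the sketch below matches what the credentials-flow tests later in this diff assert for clientId 'abc' and clientSecret '123':

// 'header' mode (the default) sends the credentials as HTTP Basic auth:
const basic = 'Basic ' + Buffer.from('abc:123').toString('base64');
console.log(basic); // "Basic YWJjOjEyMw==", the header value asserted in the tests below
// The form body then carries only grant_type=client_credentials (plus an optional scope).

// 'body' mode leaves the Authorization header unset and appends
// client_id=abc&client_secret=123 to the form body instead.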
3
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/index 2.ts
Executable file
@@ -0,0 +1,3 @@
export { ClientOAuth2, ClientOAuth2Options, ClientOAuth2RequestObject } from './client-oauth2';
export { ClientOAuth2Token, ClientOAuth2TokenData } from './client-oauth2-token';
export type * from './types';
31
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/types 2.ts
Executable file
@@ -0,0 +1,31 @@
export type Headers = Record<string, string | string[]>;

export type OAuth2GrantType = 'pkce' | 'authorizationCode' | 'clientCredentials';

export interface OAuth2CredentialData {
	clientId: string;
	clientSecret?: string;
	accessTokenUrl: string;
	authentication?: 'header' | 'body';
	authUrl?: string;
	scope?: string;
	authQueryParameters?: string;
	additionalBodyProperties?: string;
	grantType: OAuth2GrantType;
	ignoreSSLIssues?: boolean;
	oauthTokenData?: {
		access_token: string;
		refresh_token?: string;
	};
}

/**
 * The response from the OAuth2 server when the access token is not successfully
 * retrieved. As specified in RFC 6749 Section 5.2:
 * https://www.rfc-editor.org/rfc/rfc6749.html#section-5.2
 */
export interface OAuth2AccessTokenErrorResponse extends Record<string, unknown> {
	error: string;
	error_description?: string;
	error_uri?: string;
}
82
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/src/utils 2.ts
Executable file
@@ -0,0 +1,82 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import type { ClientOAuth2Options, ClientOAuth2RequestObject } from './client-oauth2';
import { ERROR_RESPONSES } from './constants';

/**
 * Check that properties exist on an object and throw when they don't.
 */
export function expects<Keys extends keyof ClientOAuth2Options>(
	obj: ClientOAuth2Options,
	...keys: Keys[]
): asserts obj is ClientOAuth2Options & {
	[K in Keys]: NonNullable<ClientOAuth2Options[K]>;
} {
	for (const key of keys) {
		if (obj[key] === null || obj[key] === undefined) {
			throw new TypeError('Expected "' + key + '" to exist');
		}
	}
}

export class AuthError extends Error {
	constructor(
		message: string,
		readonly body: any,
		readonly code = 'EAUTH',
	) {
		super(message);
	}
}

/**
 * Pull an authentication error from the response data.
 */
export function getAuthError(body: {
	error: string;
	error_description?: string;
}): Error | undefined {
	const message: string | undefined =
		ERROR_RESPONSES[body.error] ?? body.error_description ?? body.error;

	if (message) {
		return new AuthError(message, body);
	}

	return undefined;
}

/**
 * Ensure a value is a string, treating null and undefined as an empty string.
 */
function toString(str: string | null | undefined) {
	return str === null || str === undefined ? '' : String(str);
}

/**
 * Create basic auth header.
 */
export function auth(username: string, password: string): string {
	return 'Basic ' + Buffer.from(toString(username) + ':' + toString(password)).toString('base64');
}

/**
 * Merge request options from an options object.
 */
export function getRequestOptions(
	{ url, method, body, query, headers }: ClientOAuth2RequestObject,
	options: ClientOAuth2Options,
): ClientOAuth2RequestObject {
	const rOptions = {
		url,
		method,
		body: { ...body, ...options.body },
		query: { ...query, ...options.query },
		headers: headers ?? {},
		ignoreSSLIssues: options.ignoreSSLIssues,
	};
	// if request authorization was overridden delete it from header
	if (rOptions.headers.Authorization === '') {
		delete rOptions.headers.Authorization;
	}
	return rOptions;
}
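A small illustration of how getRequestOptions merges per-request and client-level values; the inputs below are made up for the example:

const merged = getRequestOptions(
	{
		url: 'https://mock.auth.service/token',
		method: 'POST',
		body: { grant_type: 'client_credentials' },
		headers: { Authorization: '' },
	},
	{
		clientId: 'abc',
		accessTokenUri: 'https://mock.auth.service/token',
		body: { audience: 'api' }, // hypothetical client-level body property
		ignoreSSLIssues: false,
	},
);
// merged.body    -> { grant_type: 'client_credentials', audience: 'api' }
// merged.headers -> {} (an empty Authorization override is deleted)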
168
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/test/client-oauth2.test 2.ts
Executable file
@@ -0,0 +1,168 @@
|
||||
import axios from 'axios';
|
||||
import nock from 'nock';
|
||||
|
||||
import { ClientOAuth2, ResponseError } from '@/client-oauth2';
|
||||
import { ERROR_RESPONSES } from '@/constants';
|
||||
import { auth, AuthError } from '@/utils';
|
||||
|
||||
import * as config from './config';
|
||||
|
||||
describe('ClientOAuth2', () => {
|
||||
const client = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authentication: 'header',
|
||||
});
|
||||
|
||||
beforeAll(async () => {
|
||||
nock.disableNetConnect();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
nock.restore();
|
||||
});
|
||||
|
||||
describe('accessTokenRequest', () => {
|
||||
const authHeader = auth(config.clientId, config.clientSecret);
|
||||
|
||||
const makeTokenCall = async () =>
|
||||
await client.accessTokenRequest({
|
||||
url: config.accessTokenUri,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: authHeader,
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
body: {
|
||||
refresh_token: 'test',
|
||||
grant_type: 'refresh_token',
|
||||
},
|
||||
});
|
||||
|
||||
const mockTokenResponse = ({
|
||||
status = 200,
|
||||
headers,
|
||||
body,
|
||||
}: {
|
||||
status: number;
|
||||
body: string;
|
||||
headers: Record<string, string>;
|
||||
}) =>
|
||||
nock(config.baseUrl).post('/login/oauth/access_token').once().reply(status, body, headers);
|
||||
|
||||
it('should send the correct request based on given options', async () => {
|
||||
mockTokenResponse({
|
||||
status: 200,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
access_token: config.accessToken,
|
||||
refresh_token: config.refreshToken,
|
||||
}),
|
||||
});
|
||||
|
||||
const axiosSpy = jest.spyOn(axios, 'request');
|
||||
|
||||
await makeTokenCall();
|
||||
|
||||
expect(axiosSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
url: config.accessTokenUri,
|
||||
method: 'POST',
|
||||
data: 'refresh_token=test&grant_type=refresh_token',
|
||||
headers: {
|
||||
Authorization: authHeader,
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test.each([
|
||||
{
|
||||
contentType: 'application/json',
|
||||
body: JSON.stringify({
|
||||
access_token: config.accessToken,
|
||||
refresh_token: config.refreshToken,
|
||||
}),
|
||||
},
|
||||
{
|
||||
contentType: 'application/json; charset=utf-8',
|
||||
body: JSON.stringify({
|
||||
access_token: config.accessToken,
|
||||
refresh_token: config.refreshToken,
|
||||
}),
|
||||
},
|
||||
{
|
||||
contentType: 'application/x-www-form-urlencoded',
|
||||
body: `access_token=${config.accessToken}&refresh_token=${config.refreshToken}`,
|
||||
},
|
||||
])('should parse response with content type $contentType', async ({ contentType, body }) => {
|
||||
mockTokenResponse({
|
||||
status: 200,
|
||||
headers: { 'Content-Type': contentType },
|
||||
body,
|
||||
});
|
||||
|
||||
const response = await makeTokenCall();
|
||||
|
||||
expect(response).toEqual({
|
||||
access_token: config.accessToken,
|
||||
refresh_token: config.refreshToken,
|
||||
});
|
||||
});
|
||||
|
||||
test.each([
|
||||
{
|
||||
contentType: 'text/html',
|
||||
body: '<html><body>Hello, world!</body></html>',
|
||||
},
|
||||
{
|
||||
contentType: 'application/xml',
|
||||
body: '<xml><body>Hello, world!</body></xml>',
|
||||
},
|
||||
{
|
||||
contentType: 'text/plain',
|
||||
body: 'Hello, world!',
|
||||
},
|
||||
])('should reject content type $contentType', async ({ contentType, body }) => {
|
||||
mockTokenResponse({
|
||||
status: 200,
|
||||
headers: { 'Content-Type': contentType },
|
||||
body,
|
||||
});
|
||||
|
||||
const result = await makeTokenCall().catch((err) => err);
|
||||
expect(result).toBeInstanceOf(Error);
|
||||
expect(result.message).toEqual(`Unsupported content type: ${contentType}`);
|
||||
});
|
||||
|
||||
it('should reject 4xx responses with auth errors', async () => {
|
||||
mockTokenResponse({
|
||||
status: 401,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ error: 'access_denied' }),
|
||||
});
|
||||
|
||||
const result = await makeTokenCall().catch((err) => err);
|
||||
expect(result).toBeInstanceOf(AuthError);
|
||||
expect(result.message).toEqual(ERROR_RESPONSES.access_denied);
|
||||
expect(result.body).toEqual({ error: 'access_denied' });
|
||||
});
|
||||
|
||||
it('should reject 3xx responses with response errors', async () => {
|
||||
mockTokenResponse({
|
||||
status: 302,
|
||||
headers: {},
|
||||
body: 'Redirected',
|
||||
});
|
||||
|
||||
const result = await makeTokenCall().catch((err) => err);
|
||||
expect(result).toBeInstanceOf(ResponseError);
|
||||
expect(result.message).toEqual('HTTP status 302');
|
||||
expect(result.body).toEqual('Redirected');
|
||||
});
|
||||
});
|
||||
});
|
||||
192
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/test/code-flow.test 2.ts
Executable file
@@ -0,0 +1,192 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { ClientOAuth2 } from '@/client-oauth2';
|
||||
import { ClientOAuth2Token } from '@/client-oauth2-token';
|
||||
import { AuthError } from '@/utils';
|
||||
|
||||
import * as config from './config';
|
||||
|
||||
describe('CodeFlow', () => {
|
||||
beforeAll(async () => {
|
||||
nock.disableNetConnect();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
nock.restore();
|
||||
});
|
||||
|
||||
const uri = `/auth/callback?code=${config.code}&state=${config.state}`;
|
||||
|
||||
const githubAuth = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authorizationUri: config.authorizationUri,
|
||||
authorizationGrants: ['code'],
|
||||
redirectUri: config.redirectUri,
|
||||
scopes: ['notifications'],
|
||||
});
|
||||
|
||||
describe('#getUri', () => {
|
||||
it('should return a valid uri', () => {
|
||||
expect(githubAuth.code.getUri()).toEqual(
|
||||
`${config.authorizationUri}?client_id=abc&` +
|
||||
`redirect_uri=${encodeURIComponent(config.redirectUri)}&` +
|
||||
'response_type=code&scope=notifications',
|
||||
);
|
||||
});
|
||||
|
||||
describe('when scopes are undefined', () => {
|
||||
it('should not include scope in the uri', () => {
|
||||
const authWithoutScopes = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authorizationUri: config.authorizationUri,
|
||||
authorizationGrants: ['code'],
|
||||
redirectUri: config.redirectUri,
|
||||
});
|
||||
expect(authWithoutScopes.code.getUri()).toEqual(
|
||||
`${config.authorizationUri}?client_id=abc&` +
|
||||
`redirect_uri=${encodeURIComponent(config.redirectUri)}&` +
|
||||
'response_type=code',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should include empty scopes array as an empty string', () => {
|
||||
const authWithEmptyScopes = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authorizationUri: config.authorizationUri,
|
||||
authorizationGrants: ['code'],
|
||||
redirectUri: config.redirectUri,
|
||||
scopes: [],
|
||||
});
|
||||
expect(authWithEmptyScopes.code.getUri()).toEqual(
|
||||
`${config.authorizationUri}?client_id=abc&` +
|
||||
`redirect_uri=${encodeURIComponent(config.redirectUri)}&` +
|
||||
'response_type=code&scope=',
|
||||
);
|
||||
});
|
||||
|
||||
it('should include empty scopes string as an empty string', () => {
|
||||
const authWithEmptyScopes = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authorizationUri: config.authorizationUri,
|
||||
authorizationGrants: ['code'],
|
||||
redirectUri: config.redirectUri,
|
||||
scopes: [],
|
||||
});
|
||||
expect(authWithEmptyScopes.code.getUri()).toEqual(
|
||||
`${config.authorizationUri}?client_id=abc&` +
|
||||
`redirect_uri=${encodeURIComponent(config.redirectUri)}&` +
|
||||
'response_type=code&scope=',
|
||||
);
|
||||
});
|
||||
|
||||
describe('when authorizationUri contains query parameters', () => {
|
||||
it('should preserve query string parameters', () => {
|
||||
const authWithParams = new ClientOAuth2({
|
||||
clientId: config.clientId,
|
||||
clientSecret: config.clientSecret,
|
||||
accessTokenUri: config.accessTokenUri,
|
||||
authorizationUri: `${config.authorizationUri}?bar=qux`,
|
||||
authorizationGrants: ['code'],
|
||||
redirectUri: config.redirectUri,
|
||||
scopes: ['notifications'],
|
||||
});
|
||||
expect(authWithParams.code.getUri()).toEqual(
|
||||
`${config.authorizationUri}?bar=qux&client_id=abc&` +
|
||||
`redirect_uri=${encodeURIComponent(config.redirectUri)}&` +
|
||||
'response_type=code&scope=notifications',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getToken', () => {
|
||||
const mockTokenCall = () =>
|
||||
nock(config.baseUrl)
|
||||
.post(
|
||||
'/login/oauth/access_token',
|
||||
({ code, grant_type, redirect_uri }) =>
|
||||
code === config.code &&
|
||||
grant_type === 'authorization_code' &&
|
||||
redirect_uri === config.redirectUri,
|
||||
)
|
||||
.once()
|
||||
.reply(200, {
|
||||
access_token: config.accessToken,
|
||||
refresh_token: config.refreshToken,
|
||||
});
|
||||
|
||||
it('should request the token', async () => {
|
||||
mockTokenCall();
|
||||
const user = await githubAuth.code.getToken(uri);
|
||||
|
||||
expect(user).toBeInstanceOf(ClientOAuth2Token);
|
||||
expect(user.accessToken).toEqual(config.accessToken);
|
||||
expect(user.tokenType).toEqual('bearer');
|
||||
});
|
||||
|
||||
it('should reject with auth errors', async () => {
|
||||
let errored = false;
|
||||
|
||||
try {
|
||||
await githubAuth.code.getToken(`${config.redirectUri}?error=invalid_request`);
|
||||
} catch (err) {
|
||||
errored = true;
|
||||
expect(err).toBeInstanceOf(AuthError);
|
||||
if (err instanceof AuthError) {
|
||||
expect(err.code).toEqual('EAUTH');
|
||||
expect(err.body.error).toEqual('invalid_request');
|
||||
}
|
||||
}
|
||||
expect(errored).toEqual(true);
|
||||
});
|
||||
|
||||
describe('#sign', () => {
|
||||
it('should be able to sign a standard request object', async () => {
|
||||
mockTokenCall();
|
||||
const token = await githubAuth.code.getToken(uri);
|
||||
const requestOptions = token.sign({
|
||||
method: 'GET',
|
||||
url: 'http://api.github.com/user',
|
||||
});
|
||||
expect(requestOptions.headers?.Authorization).toEqual(`Bearer ${config.accessToken}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#refresh', () => {
|
||||
const mockRefreshCall = () =>
|
||||
nock(config.baseUrl)
|
||||
.post(
|
||||
'/login/oauth/access_token',
|
||||
({ refresh_token, grant_type }) =>
|
||||
refresh_token === config.refreshToken && grant_type === 'refresh_token',
|
||||
)
|
||||
.once()
|
||||
.reply(200, {
|
||||
access_token: config.refreshedAccessToken,
|
||||
refresh_token: config.refreshedRefreshToken,
|
||||
});
|
||||
|
||||
it('should make a request to get a new access token', async () => {
|
||||
mockTokenCall();
|
||||
const token = await githubAuth.code.getToken(uri, { state: config.state });
|
||||
expect(token.refreshToken).toEqual(config.refreshToken);
|
||||
|
||||
mockRefreshCall();
|
||||
const token1 = await token.refresh();
|
||||
expect(token1).toBeInstanceOf(ClientOAuth2Token);
|
||||
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
|
||||
expect(token1.refreshToken).toEqual(config.refreshedRefreshToken);
|
||||
expect(token1.tokenType).toEqual('bearer');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
15
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/test/config 2.ts
Executable file
@@ -0,0 +1,15 @@
export const baseUrl = 'https://mock.auth.service';
export const accessTokenUri = baseUrl + '/login/oauth/access_token';
export const authorizationUri = baseUrl + '/login/oauth/authorize';
export const redirectUri = 'http://example.com/auth/callback';

export const accessToken = '4430eb1615fb6127cbf828a8e403';
export const refreshToken = 'def456token';
export const refreshedAccessToken = 'f456okeendt';
export const refreshedRefreshToken = 'f4f6577c0f3af456okeendt';

export const clientId = 'abc';
export const clientSecret = '123';

export const code = 'fbe55d970377e0686746';
export const state = '7076840850058943';
215
n8n-n8n-1.109.2/packages/@n8n/client-oauth2/test/credentials-flow.test 2.ts
Executable file
@@ -0,0 +1,215 @@
|
||||
import nock from 'nock';
|
||||
|
||||
import { ClientOAuth2, type ClientOAuth2Options } from '@/client-oauth2';
|
||||
import { ClientOAuth2Token } from '@/client-oauth2-token';
|
||||
import type { Headers } from '@/types';
|
||||
|
||||
import * as config from './config';
|
||||
|
||||
describe('CredentialsFlow', () => {
|
||||
beforeAll(async () => {
|
||||
nock.disableNetConnect();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
nock.restore();
|
||||
});
|
||||
|
||||
beforeEach(() => jest.clearAllMocks());

describe('#getToken', () => {
const createAuthClient = ({
scopes,
authentication,
}: Pick<ClientOAuth2Options, 'scopes' | 'authentication'> = {}) =>
new ClientOAuth2({
clientId: config.clientId,
clientSecret: config.clientSecret,
accessTokenUri: config.accessTokenUri,
authentication,
authorizationGrants: ['credentials'],
scopes,
});

const mockTokenCall = async ({ requestedScope }: { requestedScope?: string } = {}) => {
const nockScope = nock(config.baseUrl)
.post(
'/login/oauth/access_token',
({ scope, grant_type }) =>
scope === requestedScope && grant_type === 'client_credentials',
)
.once()
.reply(200, {
access_token: config.accessToken,
refresh_token: config.refreshToken,
scope: requestedScope,
});
return await new Promise<{ headers: Headers; body: unknown }>((resolve) => {
nockScope.once('request', (req) => {
resolve({
headers: req.headers,
body: req.requestBodyBuffers.toString('utf-8'),
});
});
});
};

it('should request the token', async () => {
const authClient = createAuthClient({ scopes: ['notifications'] });
const requestPromise = mockTokenCall({ requestedScope: 'notifications' });

const user = await authClient.credentials.getToken();

expect(user).toBeInstanceOf(ClientOAuth2Token);
expect(user.accessToken).toEqual(config.accessToken);
expect(user.tokenType).toEqual('bearer');
expect(user.data.scope).toEqual('notifications');

const { headers, body } = await requestPromise;
expect(headers.authorization).toBe('Basic YWJjOjEyMw==');
expect(body).toEqual('grant_type=client_credentials&scope=notifications');
});

it('when scopes are undefined, it should not send scopes to an auth server', async () => {
const authClient = createAuthClient();
const requestPromise = mockTokenCall();

const user = await authClient.credentials.getToken();
expect(user).toBeInstanceOf(ClientOAuth2Token);
expect(user.accessToken).toEqual(config.accessToken);
expect(user.tokenType).toEqual('bearer');
expect(user.data.scope).toEqual(undefined);

const { body } = await requestPromise;
expect(body).toEqual('grant_type=client_credentials');
});

it('when scopes is an empty array, it should send empty scope string to an auth server', async () => {
const authClient = createAuthClient({ scopes: [] });
const requestPromise = mockTokenCall({ requestedScope: '' });

const user = await authClient.credentials.getToken();
expect(user).toBeInstanceOf(ClientOAuth2Token);
expect(user.accessToken).toEqual(config.accessToken);
expect(user.tokenType).toEqual('bearer');
expect(user.data.scope).toEqual('');

const { body } = await requestPromise;
expect(body).toEqual('grant_type=client_credentials&scope=');
});

it('should handle authentication = "header"', async () => {
const authClient = createAuthClient({ scopes: [] });
const requestPromise = mockTokenCall({ requestedScope: '' });
await authClient.credentials.getToken();
const { headers, body } = await requestPromise;
expect(headers?.authorization).toBe('Basic YWJjOjEyMw==');
expect(body).toEqual('grant_type=client_credentials&scope=');
});

it('should handle authentication = "body"', async () => {
const authClient = createAuthClient({ scopes: [], authentication: 'body' });
const requestPromise = mockTokenCall({ requestedScope: '' });
await authClient.credentials.getToken();
const { headers, body } = await requestPromise;
expect(headers?.authorization).toBe(undefined);
expect(body).toEqual('grant_type=client_credentials&scope=&client_id=abc&client_secret=123');
});

describe('#sign', () => {
it('should be able to sign a standard request object', async () => {
const authClient = createAuthClient({ scopes: ['notifications'] });
void mockTokenCall({ requestedScope: 'notifications' });

const token = await authClient.credentials.getToken();
const requestOptions = token.sign({
method: 'GET',
url: `${config.baseUrl}/test`,
});

expect(requestOptions.headers?.Authorization).toEqual(`Bearer ${config.accessToken}`);
});
});

describe('#refresh', () => {
const mockRefreshCall = async () => {
const nockScope = nock(config.baseUrl)
.post(
'/login/oauth/access_token',
({ refresh_token, grant_type }) =>
refresh_token === config.refreshToken && grant_type === 'refresh_token',
)
.once()
.reply(200, {
access_token: config.refreshedAccessToken,
refresh_token: config.refreshedRefreshToken,
});
return await new Promise<{ headers: Headers; body: unknown }>((resolve) => {
nockScope.once('request', (req) => {
resolve({
headers: req.headers,
body: req.requestBodyBuffers.toString('utf-8'),
});
});
});
};

it('should make a request to get a new access token', async () => {
const authClient = createAuthClient({ scopes: ['notifications'] });
void mockTokenCall({ requestedScope: 'notifications' });

const token = await authClient.credentials.getToken();
expect(token.accessToken).toEqual(config.accessToken);

const requestPromise = mockRefreshCall();
const token1 = await token.refresh();
await requestPromise;

expect(token1).toBeInstanceOf(ClientOAuth2Token);
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
expect(token1.tokenType).toEqual('bearer');
});

it('should make a request to get a new access token with authentication = "body"', async () => {
const authClient = createAuthClient({ scopes: ['notifications'], authentication: 'body' });
void mockTokenCall({ requestedScope: 'notifications' });

const token = await authClient.credentials.getToken();
expect(token.accessToken).toEqual(config.accessToken);

const requestPromise = mockRefreshCall();
const token1 = await token.refresh();
const { headers, body } = await requestPromise;

expect(token1).toBeInstanceOf(ClientOAuth2Token);
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
expect(token1.tokenType).toEqual('bearer');
expect(headers?.authorization).toBe(undefined);
expect(body).toEqual(
'refresh_token=def456token&grant_type=refresh_token&client_id=abc&client_secret=123',
);
});

it('should make a request to get a new access token with authentication = "header"', async () => {
const authClient = createAuthClient({
scopes: ['notifications'],
authentication: 'header',
});
void mockTokenCall({ requestedScope: 'notifications' });

const token = await authClient.credentials.getToken();
expect(token.accessToken).toEqual(config.accessToken);

const requestPromise = mockRefreshCall();
const token1 = await token.refresh();
const { headers, body } = await requestPromise;

expect(token1).toBeInstanceOf(ClientOAuth2Token);
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
expect(token1.tokenType).toEqual('bearer');
expect(headers?.authorization).toBe('Basic YWJjOjEyMw==');
expect(body).toEqual('refresh_token=def456token&grant_type=refresh_token');
});
});
});
});
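For orientation, the tests above exercise the client-credentials grant of the ClientOAuth2 helper: credentials.getToken() posts grant_type=client_credentials (with the client id and secret sent either as a Basic Authorization header or in the request body), and the resulting ClientOAuth2Token can sign outgoing requests and refresh itself. The following is only a minimal usage sketch; the import path, endpoint, ids, and scopes are illustrative assumptions, not values taken from this diff.

import { ClientOAuth2 } from '@n8n/client-oauth2'; // assumed public export of the package under test

const client = new ClientOAuth2({
  clientId: 'my-client-id', // placeholder
  clientSecret: 'my-client-secret', // placeholder
  accessTokenUri: 'https://example.com/login/oauth/access_token',
  authorizationGrants: ['credentials'],
  scopes: ['notifications'],
  authentication: 'header', // or 'body'; both paths are covered by the tests above
});

// Fetch a token, sign a request with it, and refresh it when needed.
const token = await client.credentials.getToken();
const signed = token.sign({ method: 'GET', url: 'https://example.com/api/resource' });
// signed.headers.Authorization carries `Bearer <access token>`
const refreshed = await token.refresh();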
1
n8n-n8n-1.109.2/packages/@n8n/codemirror-lang/src/index 2.ts
Executable file
1
n8n-n8n-1.109.2/packages/@n8n/codemirror-lang/src/index 2.ts
Executable file
@@ -0,0 +1 @@
export { parserWithMetaData, n8nLanguage } from './expressions';
19
n8n-n8n-1.109.2/packages/@n8n/config/src/custom-types 2.ts
Executable file
19
n8n-n8n-1.109.2/packages/@n8n/config/src/custom-types 2.ts
Executable file
@@ -0,0 +1,19 @@
abstract class StringArray<T extends string> extends Array<T> {
constructor(str: string, delimiter: string) {
super();
const parsed = str.split(delimiter) as this;
return parsed.filter((i) => typeof i === 'string' && i.length);
}
}

export class CommaSeparatedStringArray<T extends string> extends StringArray<T> {
constructor(str: string) {
super(str, ',');
}
}

export class ColonSeparatedStringArray<T extends string = string> extends StringArray<T> {
constructor(str: string) {
super(str, ':');
}
}
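These helpers turn a delimited environment value into a typed string array and drop empty segments. A small behavioural sketch, assuming the classes are consumed via the package's public exports (the input values are made up):

import { CommaSeparatedStringArray, ColonSeparatedStringArray } from '@n8n/config';

const scopes = new CommaSeparatedStringArray<'core' | 'external'>('core,,external');
// -> ['core', 'external']  (the empty segment is filtered out)

const hookFiles = new ColonSeparatedStringArray('/hooks/a.js:/hooks/b.js');
// -> ['/hooks/a.js', '/hooks/b.js']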
118
n8n-n8n-1.109.2/packages/@n8n/config/src/decorators 2.ts
Executable file
118
n8n-n8n-1.109.2/packages/@n8n/config/src/decorators 2.ts
Executable file
@@ -0,0 +1,118 @@
import 'reflect-metadata';
import { Container, Service } from '@n8n/di';
import { readFileSync } from 'fs';
import { z } from 'zod';

// eslint-disable-next-line @typescript-eslint/no-restricted-types
type Class = Function;
type Constructable<T = unknown> = new (rawValue: string) => T;
type PropertyKey = string | symbol;
type PropertyType = number | boolean | string | Class;
interface PropertyMetadata {
type: PropertyType;
envName?: string;
schema?: z.ZodType<unknown>;
}

const globalMetadata = new Map<Class, Map<PropertyKey, PropertyMetadata>>();

const readEnv = (envName: string) => {
if (envName in process.env) return process.env[envName];

// Read the value from a file, if "_FILE" environment variable is defined
const filePath = process.env[`${envName}_FILE`];
if (filePath) return readFileSync(filePath, 'utf8');

return undefined;
};

export const Config: ClassDecorator = (ConfigClass: Class) => {
const factory = function (...args: unknown[]) {
const config = new (ConfigClass as new (...a: unknown[]) => Record<PropertyKey, unknown>)(
...args,
);
const classMetadata = globalMetadata.get(ConfigClass);
if (!classMetadata) {
throw new Error('Invalid config class: ' + ConfigClass.name);
}

for (const [key, { type, envName, schema }] of classMetadata) {
if (typeof type === 'function' && globalMetadata.has(type)) {
config[key] = Container.get(type as Constructable);
} else if (envName) {
const value = readEnv(envName);
if (value === undefined) continue;

if (schema) {
const result = schema.safeParse(value);
if (result.error) {
console.warn(
`Invalid value for ${envName} - ${result.error.issues[0].message}. Falling back to default value.`,
);
continue;
}
config[key] = result.data;
} else if (type === Number) {
const parsed = Number(value);
if (isNaN(parsed)) {
console.warn(`Invalid number value for ${envName}: ${value}`);
} else {
config[key] = parsed;
}
} else if (type === Boolean) {
if (['true', '1'].includes(value.toLowerCase())) {
config[key] = true;
} else if (['false', '0'].includes(value.toLowerCase())) {
config[key] = false;
} else {
console.warn(`Invalid boolean value for ${envName}: ${value}`);
}
} else if (type === Date) {
const timestamp = Date.parse(value);
if (isNaN(timestamp)) {
console.warn(`Invalid timestamp value for ${envName}: ${value}`);
} else {
config[key] = new Date(timestamp);
}
} else if (type === String) {
config[key] = value.trim().replace(/^(['"])(.*)\1$/, '$2');
} else {
config[key] = new (type as Constructable)(value);
}
}
}

if (typeof config.sanitize === 'function') config.sanitize();

return config;
};
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return Service({ factory })(ConfigClass);
};

export const Nested: PropertyDecorator = (target: object, key: PropertyKey) => {
const ConfigClass = target.constructor;
const classMetadata = globalMetadata.get(ConfigClass) ?? new Map<PropertyKey, PropertyMetadata>();
const type = Reflect.getMetadata('design:type', target, key) as PropertyType;
classMetadata.set(key, { type });
globalMetadata.set(ConfigClass, classMetadata);
};

export const Env =
(envName: string, schema?: PropertyMetadata['schema']): PropertyDecorator =>
(target: object, key: PropertyKey) => {
const ConfigClass = target.constructor;
const classMetadata =
globalMetadata.get(ConfigClass) ?? new Map<PropertyKey, PropertyMetadata>();

const type = Reflect.getMetadata('design:type', target, key) as PropertyType;
const isZodSchema = schema instanceof z.ZodType;
if (type === Object && !isZodSchema) {
throw new Error(
`Invalid decorator metadata on key "${key as string}" on ${ConfigClass.name}\n Please use explicit typing on all config fields`,
);
}

classMetadata.set(key, { type, envName, schema });
globalMetadata.set(ConfigClass, classMetadata);
};
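Taken together, @Config, @Env and @Nested record per-property metadata so that, when a class is resolved from the DI container, each field is filled from its environment variable, with optional zod validation, number/boolean/Date coercion, and a *_FILE fallback for secrets. A hedged sketch of how a config class might be declared with these decorators; the class names, field names and EXAMPLE_* variables below are invented for illustration and are not part of this diff.

import { z } from 'zod';
import { Container } from '@n8n/di';
import { Config, Env, Nested } from '@n8n/config';

@Config
class ExampleSmtpConfig {
  /** Read from EXAMPLE_SMTP_HOST, or from the file named by EXAMPLE_SMTP_HOST_FILE. */
  @Env('EXAMPLE_SMTP_HOST')
  host: string = 'localhost';

  /** Coerced to a number; an unparseable value logs a warning and keeps the default. */
  @Env('EXAMPLE_SMTP_PORT')
  port: number = 25;

  /** Validated by zod; an invalid value warns and falls back to the default. */
  @Env('EXAMPLE_SMTP_MODE', z.enum(['plain', 'tls']))
  mode: 'plain' | 'tls' = 'plain';
}

@Config
class ExampleConfig {
  @Nested
  smtp: ExampleSmtpConfig;
}

// Population happens in the @Config factory when the container builds the instance.
const cfg = Container.get(ExampleConfig);
console.log(cfg.smtp.host, cfg.smtp.port, cfg.smtp.mode);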
211
n8n-n8n-1.109.2/packages/@n8n/config/src/index 2.ts
Executable file
211
n8n-n8n-1.109.2/packages/@n8n/config/src/index 2.ts
Executable file
@@ -0,0 +1,211 @@
import { z } from 'zod';

import { AiAssistantConfig } from './configs/ai-assistant.config';
import { AiConfig } from './configs/ai.config';
import { AuthConfig } from './configs/auth.config';
import { CacheConfig } from './configs/cache.config';
import { CredentialsConfig } from './configs/credentials.config';
import { DatabaseConfig } from './configs/database.config';
import { DeploymentConfig } from './configs/deployment.config';
import { DiagnosticsConfig } from './configs/diagnostics.config';
import { EndpointsConfig } from './configs/endpoints.config';
import { EventBusConfig } from './configs/event-bus.config';
import { ExecutionsConfig } from './configs/executions.config';
import { ExternalHooksConfig } from './configs/external-hooks.config';
import { GenericConfig } from './configs/generic.config';
import { HiringBannerConfig } from './configs/hiring-banner.config';
import { LicenseConfig } from './configs/license.config';
import { LoggingConfig } from './configs/logging.config';
import { MfaConfig } from './configs/mfa.config';
import { MultiMainSetupConfig } from './configs/multi-main-setup.config';
import { NodesConfig } from './configs/nodes.config';
import { PartialExecutionsConfig } from './configs/partial-executions.config';
import { PersonalizationConfig } from './configs/personalization.config';
import { PublicApiConfig } from './configs/public-api.config';
import { RedisConfig } from './configs/redis.config';
import { TaskRunnersConfig } from './configs/runners.config';
import { ScalingModeConfig } from './configs/scaling-mode.config';
import { SecurityConfig } from './configs/security.config';
import { SentryConfig } from './configs/sentry.config';
import { SsoConfig } from './configs/sso.config';
import { TagsConfig } from './configs/tags.config';
import { TemplatesConfig } from './configs/templates.config';
import { UserManagementConfig } from './configs/user-management.config';
import { VersionNotificationsConfig } from './configs/version-notifications.config';
import { WorkflowHistoryConfig } from './configs/workflow-history.config';
import { WorkflowsConfig } from './configs/workflows.config';
import { Config, Env, Nested } from './decorators';

export { Config, Env, Nested } from './decorators';
export { DatabaseConfig } from './configs/database.config';
export { InstanceSettingsConfig } from './configs/instance-settings-config';
export { TaskRunnersConfig } from './configs/runners.config';
export { SecurityConfig } from './configs/security.config';
export { ExecutionsConfig } from './configs/executions.config';
export { LOG_SCOPES } from './configs/logging.config';
export type { LogScope } from './configs/logging.config';
export { WorkflowsConfig } from './configs/workflows.config';
export * from './custom-types';
export { DeploymentConfig } from './configs/deployment.config';
export { MfaConfig } from './configs/mfa.config';
export { HiringBannerConfig } from './configs/hiring-banner.config';
export { PersonalizationConfig } from './configs/personalization.config';
export { NodesConfig } from './configs/nodes.config';
export { CronLoggingConfig } from './configs/logging.config';

const protocolSchema = z.enum(['http', 'https']);

export type Protocol = z.infer<typeof protocolSchema>;

@Config
export class GlobalConfig {
@Nested
auth: AuthConfig;

@Nested
database: DatabaseConfig;

@Nested
credentials: CredentialsConfig;

@Nested
userManagement: UserManagementConfig;

@Nested
versionNotifications: VersionNotificationsConfig;

@Nested
publicApi: PublicApiConfig;

@Nested
externalHooks: ExternalHooksConfig;

@Nested
templates: TemplatesConfig;

@Nested
eventBus: EventBusConfig;

@Nested
nodes: NodesConfig;

@Nested
workflows: WorkflowsConfig;

@Nested
sentry: SentryConfig;

/** Path n8n is deployed to */
@Env('N8N_PATH')
path: string = '/';

/** Host name n8n can be reached */
@Env('N8N_HOST')
host: string = 'localhost';

/** HTTP port n8n can be reached */
@Env('N8N_PORT')
port: number = 5678;

/** IP address n8n should listen on */
@Env('N8N_LISTEN_ADDRESS')
listen_address: string = '::';

/** HTTP Protocol via which n8n can be reached */
@Env('N8N_PROTOCOL', protocolSchema)
protocol: Protocol = 'http';

@Nested
endpoints: EndpointsConfig;

@Nested
cache: CacheConfig;

@Nested
queue: ScalingModeConfig;

@Nested
logging: LoggingConfig;

@Nested
taskRunners: TaskRunnersConfig;

@Nested
multiMainSetup: MultiMainSetupConfig;

@Nested
generic: GenericConfig;

@Nested
license: LicenseConfig;

@Nested
security: SecurityConfig;

@Nested
executions: ExecutionsConfig;

@Nested
diagnostics: DiagnosticsConfig;

@Nested
aiAssistant: AiAssistantConfig;

@Nested
tags: TagsConfig;

@Nested
partialExecutions: PartialExecutionsConfig;

@Nested
workflowHistory: WorkflowHistoryConfig;

@Nested
deployment: DeploymentConfig;

@Nested
mfa: MfaConfig;

@Nested
hiringBanner: HiringBannerConfig;

@Nested
personalization: PersonalizationConfig;

@Nested
sso: SsoConfig;

/** Default locale for the UI. */
@Env('N8N_DEFAULT_LOCALE')
defaultLocale: string = 'en';

/** Whether to hide the page that shows active workflows and executions count. */
@Env('N8N_HIDE_USAGE_PAGE')
hideUsagePage: boolean = false;

/** Number of reverse proxies n8n is running behind. */
@Env('N8N_PROXY_HOPS')
proxy_hops: number = 0;

/** SSL key for HTTPS protocol. */
@Env('N8N_SSL_KEY')
ssl_key: string = '';

/** SSL cert for HTTPS protocol. */
@Env('N8N_SSL_CERT')
ssl_cert: string = '';

/** Public URL where the editor is accessible. Also used for emails sent from n8n. */
@Env('N8N_EDITOR_BASE_URL')
editorBaseUrl: string = '';

/** URLs to external frontend hooks files, separated by semicolons. */
@Env('EXTERNAL_FRONTEND_HOOKS_URLS')
externalFrontendHooksUrls: string = '';

@Nested
redis: RedisConfig;

@Nested
ai: AiConfig;
}
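Because GlobalConfig is built through the @Config factory, consumers resolve it from the DI container and get a single instance with all environment overrides already applied. A minimal reading sketch; the environment values in the comments are illustrative examples, not defaults from this diff except where stated:

import { Container } from '@n8n/di';
import { GlobalConfig } from '@n8n/config';

const globalConfig = Container.get(GlobalConfig);

console.log(globalConfig.port); // 5678 by default, or the parsed value of N8N_PORT
console.log(globalConfig.protocol); // 'http' unless N8N_PROTOCOL is set to 'https'
console.log(globalConfig.defaultLocale); // 'en' by default, e.g. 'zh-CN' if N8N_DEFAULT_LOCALE is set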
486
n8n-n8n-1.109.2/packages/@n8n/config/test/config.test 2.ts
Executable file
486
n8n-n8n-1.109.2/packages/@n8n/config/test/config.test 2.ts
Executable file
@@ -0,0 +1,486 @@
import { Container } from '@n8n/di';
import fs from 'fs';
import { mock } from 'jest-mock-extended';

import type { UserManagementConfig } from '../src/configs/user-management.config';
import { GlobalConfig } from '../src/index';

jest.mock('fs');
const mockFs = mock<typeof fs>();
fs.readFileSync = mockFs.readFileSync;

const consoleWarnMock = jest.spyOn(console, 'warn').mockImplementation(() => {});

describe('GlobalConfig', () => {
beforeEach(() => {
Container.reset();
jest.clearAllMocks();
});

const originalEnv = process.env;
afterEach(() => {
process.env = originalEnv;
});

const defaultConfig: GlobalConfig = {
path: '/',
host: 'localhost',
port: 5678,
listen_address: '::',
protocol: 'http',
auth: {
cookie: {
samesite: 'lax',
secure: true,
},
},
defaultLocale: 'en',
hideUsagePage: false,
deployment: {
type: 'default',
},
mfa: {
enabled: true,
},
hiringBanner: {
enabled: true,
},
personalization: {
enabled: true,
},
proxy_hops: 0,
ssl_key: '',
ssl_cert: '',
editorBaseUrl: '',
database: {
logging: {
enabled: false,
maxQueryExecutionTime: 0,
options: 'error',
},
mysqldb: {
database: 'n8n',
host: 'localhost',
password: '',
port: 3306,
user: 'root',
},
postgresdb: {
database: 'n8n',
host: 'localhost',
password: '',
poolSize: 2,
port: 5432,
schema: 'public',
connectionTimeoutMs: 20_000,
ssl: {
ca: '',
cert: '',
enabled: false,
key: '',
rejectUnauthorized: true,
},
user: 'postgres',
idleTimeoutMs: 30_000,
},
sqlite: {
database: 'database.sqlite',
enableWAL: false,
executeVacuumOnStartup: false,
poolSize: 0,
},
tablePrefix: '',
type: 'sqlite',
isLegacySqlite: true,
pingIntervalSeconds: 2,
},
credentials: {
defaultName: 'My credentials',
overwrite: {
data: '{}',
endpoint: '',
},
},
userManagement: {
jwtSecret: '',
jwtSessionDurationHours: 168,
jwtRefreshTimeoutHours: 0,
emails: {
mode: 'smtp',
smtp: {
host: '',
port: 465,
secure: true,
sender: '',
startTLS: true,
auth: {
pass: '',
user: '',
privateKey: '',
serviceClient: '',
},
},
template: {
'credentials-shared': '',
'user-invited': '',
'password-reset-requested': '',
'workflow-shared': '',
'project-shared': '',
},
},
} as UserManagementConfig,
eventBus: {
checkUnsentInterval: 0,
crashRecoveryMode: 'extensive',
logWriter: {
keepLogCount: 3,
logBaseName: 'n8nEventLog',
maxFileSizeInKB: 10240,
},
},
externalHooks: {
files: [],
},
nodes: {
errorTriggerType: 'n8n-nodes-base.errorTrigger',
include: [],
exclude: [],
pythonEnabled: true,
},
publicApi: {
disabled: false,
path: 'api',
swaggerUiDisabled: false,
},
templates: {
enabled: true,
host: 'https://api.n8n.io/api/',
},
versionNotifications: {
enabled: true,
endpoint: 'https://api.n8n.io/api/versions/',
whatsNewEnabled: true,
whatsNewEndpoint: 'https://api.n8n.io/api/whats-new',
infoUrl: 'https://docs.n8n.io/hosting/installation/updating/',
},
workflows: {
defaultName: 'My workflow',
callerPolicyDefaultOption: 'workflowsFromSameOwner',
activationBatchSize: 1,
},
endpoints: {
metrics: {
enable: false,
prefix: 'n8n_',
includeWorkflowIdLabel: false,
includeWorkflowNameLabel: false,
includeDefaultMetrics: true,
includeMessageEventBusMetrics: false,
includeNodeTypeLabel: false,
includeCacheMetrics: false,
includeApiEndpoints: false,
includeApiPathLabel: false,
includeApiMethodLabel: false,
includeCredentialTypeLabel: false,
includeApiStatusCodeLabel: false,
includeQueueMetrics: false,
queueMetricsInterval: 20,
activeWorkflowCountInterval: 60,
},
additionalNonUIRoutes: '',
disableProductionWebhooksOnMainProcess: false,
disableUi: false,
form: 'form',
formTest: 'form-test',
formWaiting: 'form-waiting',
mcp: 'mcp',
mcpTest: 'mcp-test',
payloadSizeMax: 16,
formDataFileSizeMax: 200,
rest: 'rest',
webhook: 'webhook',
webhookTest: 'webhook-test',
webhookWaiting: 'webhook-waiting',
},
cache: {
backend: 'auto',
memory: {
maxSize: 3145728,
ttl: 3600000,
},
redis: {
prefix: 'cache',
ttl: 3600000,
},
},
queue: {
health: {
active: false,
port: 5678,
address: '::',
},
bull: {
redis: {
db: 0,
host: 'localhost',
password: '',
port: 6379,
timeoutThreshold: 10_000,
username: '',
clusterNodes: '',
tls: false,
dualStack: false,
},
gracefulShutdownTimeout: 30,
prefix: 'bull',
settings: {
lockDuration: 30_000,
lockRenewTime: 15_000,
stalledInterval: 30_000,
maxStalledCount: 1,
},
},
},
taskRunners: {
enabled: false,
mode: 'internal',
path: '/runners',
authToken: '',
listenAddress: '127.0.0.1',
maxPayload: 1024 * 1024 * 1024,
port: 5679,
maxOldSpaceSize: '',
maxConcurrency: 10,
taskTimeout: 300,
heartbeatInterval: 30,
insecureMode: false,
},
sentry: {
backendDsn: '',
frontendDsn: '',
environment: '',
deploymentName: '',
},
logging: {
level: 'info',
format: 'text',
outputs: ['console'],
file: {
fileCountMax: 100,
fileSizeMax: 16,
location: 'logs/n8n.log',
},
scopes: [],
cron: {
activeInterval: 0,
},
},
multiMainSetup: {
enabled: false,
ttl: 10,
interval: 3,
},
generic: {
timezone: 'America/New_York',
releaseChannel: 'dev',
gracefulShutdownTimeout: 30,
},
license: {
serverUrl: 'https://license.n8n.io/v1',
autoRenewalEnabled: true,
detachFloatingOnShutdown: true,
activationKey: '',
tenantId: 1,
cert: '',
},
security: {
restrictFileAccessTo: '',
blockFileAccessToN8nFiles: true,
daysAbandonedWorkflow: 90,
contentSecurityPolicy: '{}',
contentSecurityPolicyReportOnly: false,
disableWebhookHtmlSandboxing: false,
},
executions: {
pruneData: true,
pruneDataMaxAge: 336,
pruneDataMaxCount: 10_000,
pruneDataHardDeleteBuffer: 1,
pruneDataIntervals: {
hardDelete: 15,
softDelete: 60,
},
concurrency: {
productionLimit: -1,
evaluationLimit: -1,
},
queueRecovery: {
interval: 180,
batchSize: 100,
},
saveDataOnError: 'all',
saveDataOnSuccess: 'all',
saveExecutionProgress: false,
saveDataManualExecutions: true,
},
diagnostics: {
enabled: true,
frontendConfig: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
backendConfig: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io',
posthogConfig: {
apiKey: 'phc_4URIAm1uYfJO7j8kWSe0J8lc8IqnstRLS7Jx8NcakHo',
apiHost: 'https://ph.n8n.io',
},
},
aiAssistant: {
baseUrl: '',
},
tags: {
disabled: false,
},
partialExecutions: {
version: 2,
},
workflowHistory: {
enabled: true,
pruneTime: -1,
},
sso: {
justInTimeProvisioning: true,
redirectLoginToSso: true,
saml: {
loginEnabled: false,
loginLabel: '',
},
oidc: {
loginEnabled: false,
},
ldap: {
loginEnabled: false,
loginLabel: '',
},
},
redis: {
prefix: 'n8n',
},
externalFrontendHooksUrls: '',
ai: {
enabled: false,
},
};

it('should use all default values when no env variables are defined', () => {
process.env = {};
const config = Container.get(GlobalConfig);
// Makes sure the objects are structurally equal while respecting getters,
// which `toEqual` and `toBe` does not do.
expect(defaultConfig).toMatchObject(config);
expect(config).toMatchObject(defaultConfig);
expect(mockFs.readFileSync).not.toHaveBeenCalled();
});

it('should use values from env variables when defined', () => {
process.env = {
DB_POSTGRESDB_HOST: 'some-host',
DB_POSTGRESDB_USER: 'n8n',
DB_POSTGRESDB_IDLE_CONNECTION_TIMEOUT: '10000',
DB_TABLE_PREFIX: 'test_',
DB_PING_INTERVAL_SECONDS: '2',
NODES_INCLUDE: '["n8n-nodes-base.hackerNews"]',
DB_LOGGING_MAX_EXECUTION_TIME: '0',
N8N_METRICS: 'TRUE',
N8N_TEMPLATES_ENABLED: '0',
};
const config = Container.get(GlobalConfig);
expect(structuredClone(config)).toEqual({
...defaultConfig,
database: {
logging: defaultConfig.database.logging,
mysqldb: defaultConfig.database.mysqldb,
postgresdb: {
...defaultConfig.database.postgresdb,
host: 'some-host',
user: 'n8n',
idleTimeoutMs: 10_000,
},
sqlite: defaultConfig.database.sqlite,
tablePrefix: 'test_',
type: 'sqlite',
pingIntervalSeconds: 2,
},
endpoints: {
...defaultConfig.endpoints,
metrics: {
...defaultConfig.endpoints.metrics,
enable: true,
},
},
nodes: {
...defaultConfig.nodes,
include: ['n8n-nodes-base.hackerNews'],
},
templates: {
...defaultConfig.templates,
enabled: false,
},
});
expect(mockFs.readFileSync).not.toHaveBeenCalled();
});

it('should read values from files using _FILE env variables', () => {
const passwordFile = '/path/to/postgres/password';
process.env = {
DB_POSTGRESDB_PASSWORD_FILE: passwordFile,
};
mockFs.readFileSync.calledWith(passwordFile, 'utf8').mockReturnValueOnce('password-from-file');

const config = Container.get(GlobalConfig);
const expected = {
...defaultConfig,
database: {
...defaultConfig.database,
postgresdb: {
...defaultConfig.database.postgresdb,
password: 'password-from-file',
},
},
};
// Makes sure the objects are structurally equal while respecting getters,
// which `toEqual` and `toBe` does not do.
expect(config).toMatchObject(expected);
expect(expected).toMatchObject(config);
expect(mockFs.readFileSync).toHaveBeenCalled();
});

it('should handle invalid numbers', () => {
process.env = {
DB_LOGGING_MAX_EXECUTION_TIME: 'abcd',
};
const config = Container.get(GlobalConfig);
expect(config.database.logging.maxQueryExecutionTime).toEqual(0);
expect(consoleWarnMock).toHaveBeenCalledWith(
'Invalid number value for DB_LOGGING_MAX_EXECUTION_TIME: abcd',
);
});

describe('string unions', () => {
it('on invalid value, should warn and fall back to default value', () => {
process.env = {
N8N_RUNNERS_MODE: 'non-existing-mode',
N8N_RUNNERS_ENABLED: 'true',
DB_TYPE: 'postgresdb',
};

const globalConfig = Container.get(GlobalConfig);
expect(globalConfig.taskRunners.mode).toEqual('internal');
expect(consoleWarnMock).toHaveBeenCalledWith(
expect.stringContaining(
"Invalid value for N8N_RUNNERS_MODE - Invalid enum value. Expected 'internal' | 'external', received 'non-existing-mode'. Falling back to default value.",
),
);

expect(globalConfig.taskRunners.enabled).toEqual(true);
expect(globalConfig.database.type).toEqual('postgresdb');
});
});
});
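The _FILE test above relies on the readEnv fallback in the decorators: when DB_POSTGRESDB_PASSWORD is absent but DB_POSTGRESDB_PASSWORD_FILE is set, the value is read from that file, which matches the usual Docker/Kubernetes secret-mount pattern. A hedged sketch of that pattern outside the test suite; the secret path is illustrative and the file is assumed to exist and be readable at startup:

import { Container } from '@n8n/di';
import { GlobalConfig } from '@n8n/config';

// Point the loader at a mounted secret instead of an inline password.
process.env.DB_POSTGRESDB_PASSWORD_FILE = '/run/secrets/postgres-password';

const config = Container.get(GlobalConfig);
// config.database.postgresdb.password now holds the (trimmed) file contents.
console.log(config.database.postgresdb.password.length > 0);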
Some files were not shown because too many files have changed in this diff.