chore: 清理macOS同步产生的重复文件

详细说明:
- 删除了352个带数字后缀的重复文件
- 更新.gitignore防止未来产生此类文件
- 这些文件是由iCloud或其他同步服务冲突产生的
- 不影响项目功能,仅清理冗余文件
This commit is contained in:
Yep_Q
2025-09-08 12:06:01 +08:00
parent 1564396449
commit d6f48d6d14
365 changed files with 2039 additions and 68301 deletions

View File

@@ -1,239 +0,0 @@
import { CSVLoader } from '@langchain/community/document_loaders/fs/csv';
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub';
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { createWriteStream } from 'fs';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import type {
IBinaryData,
IExecuteFunctions,
INodeExecutionData,
ISupplyDataFunctions,
} from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';
import { pipeline } from 'stream/promises';
import { file as tmpFile, type DirectoryResult } from 'tmp-promise';
import { getMetadataFiltersValues } from './helpers';
// Maps each user-selectable loader type to the MIME types it accepts.
// 'auto' matches anything; the concrete loader is then chosen from the
// binary data's mime type at load time.
const SUPPORTED_MIME_TYPES = {
	auto: ['*/*'],
	pdfLoader: ['application/pdf'],
	csvLoader: ['text/csv'],
	epubLoader: ['application/epub+zip'],
	docxLoader: ['application/vnd.openxmlformats-officedocument.wordprocessingml.document'],
	textLoader: ['text/plain', 'text/mdx', 'text/md'],
	jsonLoader: ['application/json'],
};
/**
 * Converts binary data attached to n8n items (PDF, CSV, EPUB, DOCX, JSON or
 * plain text) into LangChain `Document`s. Documents are optionally chunked
 * with the configured text splitter and enriched with the user's metadata
 * filter values.
 */
export class N8nBinaryLoader {
	constructor(
		private context: IExecuteFunctions | ISupplyDataFunctions,
		private optionsPrefix = '',
		private binaryDataKey = '',
		private textSplitter?: TextSplitter,
	) {}

	/** Processes every input item in order and concatenates the resulting documents. */
	async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
		const docs: Document[] = [];
		if (!items) return [];
		for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
			const processedDocuments = await this.processItem(items[itemIndex], itemIndex);
			docs.push(...processedDocuments);
		}
		return docs;
	}

	/**
	 * Validates that the binary data's mime type is supported and agrees with
	 * the loader the user selected.
	 * @throws NodeOperationError when the mime type is unsupported or does not
	 *         match the selected loader (the error suggests the matching loader)
	 */
	private async validateMimeType(
		mimeType: string,
		selectedLoader: keyof typeof SUPPORTED_MIME_TYPES,
	): Promise<void> {
		// Check if loader matches the mime-type of the data
		if (selectedLoader !== 'auto' && !SUPPORTED_MIME_TYPES[selectedLoader].includes(mimeType)) {
			const neededLoader = Object.keys(SUPPORTED_MIME_TYPES).find((loader) =>
				SUPPORTED_MIME_TYPES[loader as keyof typeof SUPPORTED_MIME_TYPES].includes(mimeType),
			);
			throw new NodeOperationError(
				this.context.getNode(),
				`Mime type doesn't match selected loader. Please select under "Loader Type": ${neededLoader}`,
			);
		}
		// Reject mime types that no loader supports at all.
		if (!Object.values(SUPPORTED_MIME_TYPES).flat().includes(mimeType)) {
			throw new NodeOperationError(this.context.getNode(), `Unsupported mime type: ${mimeType}`);
		}
		// NOTE(review): this guard appears unreachable — the first check above
		// already throws for any non-'auto' loader whose list excludes mimeType.
		// Kept verbatim as a defensive fallback.
		if (
			!SUPPORTED_MIME_TYPES[selectedLoader].includes(mimeType) &&
			selectedLoader !== 'textLoader' &&
			selectedLoader !== 'auto'
		) {
			throw new NodeOperationError(
				this.context.getNode(),
				`Unsupported mime type: ${mimeType} for selected loader: ${selectedLoader}`,
			);
		}
	}

	/**
	 * Materializes the item's binary payload as a Blob: from the binary-data
	 * store when an id is present, otherwise from the inline encoded data.
	 * (The `string` part of the return type allows callers to pass through a
	 * file path; this method itself always produces a Blob.)
	 */
	private async getFilePathOrBlob(
		binaryData: IBinaryData,
		mimeType: string,
	): Promise<string | Blob> {
		if (binaryData.id) {
			const binaryBuffer = await this.context.helpers.binaryToBuffer(
				await this.context.helpers.getBinaryStream(binaryData.id),
			);
			return new Blob([binaryBuffer as BlobPart], {
				type: mimeType,
			});
		} else {
			return new Blob([Buffer.from(binaryData.data, BINARY_ENCODING)], {
				type: mimeType,
			});
		}
	}

	/**
	 * Instantiates the document loader matching the mime type. Loading an EPUB
	 * from a Blob spools the content to a temporary file; its cleanup callback
	 * is returned so the caller can delete the file once loading is done.
	 */
	private async getLoader(
		mimeType: string,
		filePathOrBlob: string | Blob,
		itemIndex: number,
	): Promise<{
		loader: PDFLoader | CSVLoader | EPubLoader | DocxLoader | TextLoader | JSONLoader;
		cleanupTmpFile?: DirectoryResult['cleanup'];
	}> {
		switch (mimeType) {
			case 'application/pdf': {
				const splitPages = this.context.getNodeParameter(
					`${this.optionsPrefix}splitPages`,
					itemIndex,
					false,
				) as boolean;
				return { loader: new PDFLoader(filePathOrBlob, { splitPages }) };
			}
			case 'text/csv': {
				const column = this.context.getNodeParameter(
					`${this.optionsPrefix}column`,
					itemIndex,
					null,
				) as string;
				const separator = this.context.getNodeParameter(
					`${this.optionsPrefix}separator`,
					itemIndex,
					',',
				) as string;
				return {
					loader: new CSVLoader(filePathOrBlob, { column: column ?? undefined, separator }),
				};
			}
			case 'application/epub+zip': {
				// EPubLoader currently does not accept Blobs
				// https://github.com/langchain-ai/langchainjs/issues/1623
				// so Blob content is written to a temp file first.
				if (filePathOrBlob instanceof Blob) {
					const tmpFileData = await tmpFile({ prefix: 'epub-loader-' });
					const bufferData = await filePathOrBlob.arrayBuffer();
					await pipeline([new Uint8Array(bufferData)], createWriteStream(tmpFileData.path));
					// Fix: hand the cleanup callback back to the caller; previously it
					// was dropped and the temp file leaked.
					return {
						loader: new EPubLoader(tmpFileData.path),
						cleanupTmpFile: tmpFileData.cleanup,
					};
				}
				return { loader: new EPubLoader(filePathOrBlob) };
			}
			case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document':
				return { loader: new DocxLoader(filePathOrBlob) };
			case 'text/plain':
				return { loader: new TextLoader(filePathOrBlob) };
			case 'application/json': {
				const pointers = this.context.getNodeParameter(
					`${this.optionsPrefix}pointers`,
					itemIndex,
					'',
				) as string;
				const pointersArray = pointers.split(',').map((pointer) => pointer.trim());
				return { loader: new JSONLoader(filePathOrBlob, pointersArray) };
			}
			default:
				// Unknown mime types fall back to plain-text loading.
				return { loader: new TextLoader(filePathOrBlob) };
		}
	}

	/** Loads documents, splitting them when a text splitter is configured. */
	private async loadDocuments(
		loader: PDFLoader | CSVLoader | EPubLoader | DocxLoader | TextLoader | JSONLoader,
	): Promise<Document[]> {
		return this.textSplitter
			? await this.textSplitter.splitDocuments(await loader.load())
			: await loader.load();
	}

	/** Invokes the temp-file cleanup callback when one exists. */
	private async cleanupTmpFileIfNeeded(
		cleanupTmpFile: DirectoryResult['cleanup'] | undefined,
	): Promise<void> {
		if (cleanupTmpFile) {
			await cleanupTmpFile();
		}
	}

	/**
	 * Processes one item. In 'allInputData' mode every binary property of every
	 * input item is loaded; otherwise only the configured binaryDataKey is used.
	 * NOTE(review): in 'allInputData' mode this iterates the *entire* input set
	 * for each item it is called with — confirm callers invoke it only once in
	 * that mode.
	 */
	async processItem(item: INodeExecutionData, itemIndex: number): Promise<Document[]> {
		const docs: Document[] = [];
		const binaryMode = this.context.getNodeParameter('binaryMode', itemIndex, 'allInputData');
		if (binaryMode === 'allInputData') {
			const binaryData = this.context.getInputData();
			for (const data of binaryData) {
				if (data.binary) {
					const binaryDataKeys = Object.keys(data.binary);
					for (const fileKey of binaryDataKeys) {
						const processedDocuments = await this.processItemByKey(item, itemIndex, fileKey);
						docs.push(...processedDocuments);
					}
				}
			}
		} else {
			const processedDocuments = await this.processItemByKey(item, itemIndex, this.binaryDataKey);
			docs.push(...processedDocuments);
		}
		return docs;
	}

	/**
	 * Loads the documents for one binary property of an item: validates the
	 * mime type, builds the loader, loads (and optionally splits) the content
	 * and merges metadata filter values into every document. Temporary files
	 * created for EPUB Blobs are removed even when loading fails.
	 */
	async processItemByKey(
		item: INodeExecutionData,
		itemIndex: number,
		binaryKey: string,
	): Promise<Document[]> {
		const selectedLoader: keyof typeof SUPPORTED_MIME_TYPES = this.context.getNodeParameter(
			'loader',
			itemIndex,
			'auto',
		) as keyof typeof SUPPORTED_MIME_TYPES;
		const docs: Document[] = [];
		const metadata = getMetadataFiltersValues(this.context, itemIndex);
		if (!item) return [];
		const binaryData = this.context.helpers.assertBinaryData(itemIndex, binaryKey);
		const { mimeType } = binaryData;
		await this.validateMimeType(mimeType, selectedLoader);
		const filePathOrBlob = await this.getFilePathOrBlob(binaryData, mimeType);
		// Fix: the cleanup callback used to be a `const undefined` that was never
		// assigned, so EPUB temp files were never deleted.
		const { loader, cleanupTmpFile } = await this.getLoader(mimeType, filePathOrBlob, itemIndex);
		try {
			const loadedDoc = await this.loadDocuments(loader);
			docs.push(...loadedDoc);
			if (metadata) {
				docs.forEach((document) => {
					document.metadata = {
						...document.metadata,
						...metadata,
					};
				});
			}
		} finally {
			await this.cleanupTmpFileIfNeeded(cleanupTmpFile);
		}
		return docs;
	}
}

View File

@@ -1,90 +0,0 @@
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import {
type IExecuteFunctions,
type INodeExecutionData,
type ISupplyDataFunctions,
NodeOperationError,
} from 'n8n-workflow';
import { getMetadataFiltersValues } from './helpers';
/**
 * Converts JSON carried by n8n items into LangChain `Document`s. Depending on
 * the 'jsonMode' parameter, the source is either the item's entire JSON
 * ('allInputData') or a value supplied via expression ('expressionData').
 * Documents are optionally chunked with the configured text splitter and
 * enriched with the user's metadata filter values.
 */
export class N8nJsonLoader {
	constructor(
		private context: IExecuteFunctions | ISupplyDataFunctions,
		private optionsPrefix = '',
		private textSplitter?: TextSplitter,
	) {}

	/** Processes every input item in order and concatenates the resulting documents. */
	async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
		const docs: Document[] = [];
		if (!items) return [];
		for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
			const processedDocuments = await this.processItem(items[itemIndex], itemIndex);
			docs.push(...processedDocuments);
		}
		return docs;
	}

	/**
	 * Converts a single item into documents.
	 * @throws NodeOperationError when no loader could be initialized for the
	 *         configured mode (should not happen with valid parameters)
	 */
	async processItem(item: INodeExecutionData, itemIndex: number): Promise<Document[]> {
		const mode = this.context.getNodeParameter('jsonMode', itemIndex, 'allInputData') as
			| 'allInputData'
			| 'expressionData';
		const pointers = this.context.getNodeParameter(
			`${this.optionsPrefix}pointers`,
			itemIndex,
			'',
		) as string;
		const pointersArray = pointers.split(',').map((pointer) => pointer.trim());
		// Fix: previously defaulted with `?? []` (an array) although the value is
		// object-spread below; `undefined` behaves identically in the guard and
		// matches the helper's Record return type.
		const metadata = getMetadataFiltersValues(this.context, itemIndex);
		if (!item) return [];
		let documentLoader: JSONLoader | TextLoader | null = null;
		if (mode === 'allInputData') {
			const itemString = JSON.stringify(item.json);
			const itemBlob = new Blob([itemString], { type: 'application/json' });
			documentLoader = new JSONLoader(itemBlob, pointersArray);
		}
		if (mode === 'expressionData') {
			const dataString = this.context.getNodeParameter('jsonData', itemIndex) as string | object;
			// Objects are serialized and routed through the JSON loader; strings
			// are treated as plain text.
			if (typeof dataString === 'object') {
				const itemBlob = new Blob([JSON.stringify(dataString)], { type: 'application/json' });
				documentLoader = new JSONLoader(itemBlob, pointersArray);
			}
			if (typeof dataString === 'string') {
				const itemBlob = new Blob([dataString], { type: 'text/plain' });
				documentLoader = new TextLoader(itemBlob);
			}
		}
		if (documentLoader === null) {
			// This should never happen
			throw new NodeOperationError(this.context.getNode(), 'Document loader is not initialized');
		}
		const docs = this.textSplitter
			? await this.textSplitter.splitDocuments(await documentLoader.load())
			: await documentLoader.load();
		if (metadata) {
			docs.forEach((doc) => {
				doc.metadata = {
					...doc.metadata,
					...metadata,
				};
			});
		}
		return docs;
	}
}

View File

@@ -1,114 +0,0 @@
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionTypes, jsonParse, NodeOperationError } from 'n8n-workflow';
import type { ZodTypeAny } from 'zod';
import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';
import type { ZodObjectAny } from '../types/types';
/**
 * Reduces a Zod schema to a coarse type label ('object' | 'number' |
 * 'boolean' | 'string') for the fallback tool description. Nullable and
 * optional wrappers are unwrapped first; anything unrecognized is reported
 * as 'string'.
 */
const getSimplifiedType = (schema: ZodTypeAny): string => {
	if (schema instanceof ZodNullable || schema instanceof ZodOptional) {
		return getSimplifiedType(schema.unwrap());
	}
	if (schema instanceof ZodObject) return 'object';
	if (schema instanceof ZodNumber) return 'number';
	if (schema instanceof ZodBoolean) return 'boolean';
	return 'string';
};
/**
 * Formats [name, schema] pairs into one human-readable line per parameter
 * (description, simplified type, required flag), joined with ',\n '.
 */
const getParametersDescription = (parameters: Array<[string, ZodTypeAny]>) => {
	const lines = parameters.map(([name, schema]) => {
		const description = schema.description ?? '';
		const required = !schema.isOptional();
		return `${name}: (description: ${description}, type: ${getSimplifiedType(schema)}, required: ${required})`;
	});
	return lines.join(',\n ');
};
/**
 * Builds the description used when this structured tool is exposed as a
 * plain DynamicTool: the original description plus, when the schema has
 * parameters, instructions telling the model to supply stringified JSON
 * with every required property.
 */
export const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject<any>) => {
	const toolParameters = Object.entries<ZodTypeAny>(schema.shape);
	if (!toolParameters.length) {
		return `${toolDescription}`;
	}
	return (
		`${toolDescription}` +
		`
Tool expects valid stringified JSON object with ${toolParameters.length} properties.
Property names with description, type and required status:
${getParametersDescription(toolParameters)}
ALL parameters marked as required must be provided`
	);
};
/**
 * DynamicStructuredTool subclass that can degrade itself into a plain
 * DynamicTool for models without native structured-tool support. The
 * fallback wrapper parses the model's single string input against the Zod
 * schema, applying progressively more lenient recovery strategies.
 */
export class N8nTool extends DynamicStructuredTool<ZodObjectAny> {
	constructor(
		private context: ISupplyDataFunctions,
		fields: DynamicStructuredToolInput<ZodObjectAny>,
	) {
		super(fields);
	}

	/**
	 * Wraps this tool as a DynamicTool whose single string argument is parsed
	 * (or recovered) into the structured arguments before invoking the
	 * original tool function.
	 */
	asDynamicTool(): DynamicTool {
		const { name, func, schema, context, description } = this;
		const parser = new StructuredOutputParser(schema);
		const wrappedFunc = async function (query: string) {
			let parsedQuery: object;
			// First we try to parse the query using the structured parser (Zod schema)
			try {
				parsedQuery = await parser.parse(query);
			} catch (e) {
				// If we were unable to parse the query using the schema, we try to gracefully handle it
				let dataFromModel;
				try {
					// First we try to parse a JSON with more relaxed rules (accepts JS
					// object notation, e.g. unquoted keys)
					dataFromModel = jsonParse<IDataObject>(query, { acceptJSObject: true });
				} catch (error) {
					// In case of error,
					// If model supplied a simple string instead of an object AND only one parameter expected, we try to recover the object structure
					if (Object.keys(schema.shape).length === 1) {
						const parameterName = Object.keys(schema.shape)[0];
						dataFromModel = { [parameterName]: query };
					} else {
						// Finally throw an error if we were unable to parse the query
						throw new NodeOperationError(
							context.getNode(),
							`Input is not a valid JSON: ${error.message}`,
						);
					}
				}
				// If we were able to parse the query with a fallback, we try to validate it using the schema
				// Here we will throw an error if the data still does not match the schema
				parsedQuery = schema.parse(dataFromModel);
			}
			try {
				// Call tool function with parsed query
				const result = await func(parsedQuery);
				return result;
			} catch (e) {
				// Surface the failure on the node's AiTool connection and return the
				// error text to the model instead of aborting the workflow.
				const { index } = context.addInputData(NodeConnectionTypes.AiTool, [[{ json: { query } }]]);
				void context.addOutputData(NodeConnectionTypes.AiTool, index, e);
				return e.toString();
			}
		};
		return new DynamicTool({
			name,
			description: prepareFallbackToolDescription(description, schema),
			func: wrappedFunc,
		});
	}
}

View File

@@ -1,170 +0,0 @@
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers';
import type { INode, ISupplyDataFunctions } from 'n8n-workflow';
import { z } from 'zod';
import { N8nTool } from './N8nTool';
// Minimal INode fixture passed to createMockExecuteFunction in every test below.
const mockNode: INode = {
	id: '1',
	name: 'Mock node',
	typeVersion: 2,
	type: 'n8n-nodes-base.mock',
	position: [60, 760],
	parameters: {
		operation: 'test',
	},
};
// Verifies that N8nTool is a drop-in DynamicStructuredTool.
describe('Test N8nTool wrapper as DynamicStructuredTool', () => {
	it('should wrap a tool', () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string(),
			}),
		});
		expect(tool).toBeInstanceOf(DynamicStructuredTool);
	});
});
// Exercises the DynamicTool fallback: conversion, generated description, and
// the input-recovery strategies of the wrapped function.
describe('Test N8nTool wrapper - DynamicTool fallback', () => {
	// asDynamicTool() must produce a plain DynamicTool.
	it('should convert the tool to a dynamic tool', () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string(),
			}),
		});
		const dynamicTool = tool.asDynamicTool();
		expect(dynamicTool).toBeInstanceOf(DynamicTool);
	});
	// The fallback description must list each parameter with description,
	// simplified type and required flag.
	it('should format fallback description correctly', () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string(),
				bar: z.number().optional(),
				qwe: z.boolean().describe('Boolean description'),
			}),
		});
		const dynamicTool = tool.asDynamicTool();
		expect(dynamicTool.description).toContain('foo: (description: , type: string, required: true)');
		expect(dynamicTool.description).toContain(
			'bar: (description: , type: number, required: false)',
		);
		expect(dynamicTool.description).toContain(
			'qwe: (description: Boolean description, type: boolean, required: true)',
		);
	});
	// With an empty schema the original description is used verbatim.
	it('should handle empty parameter list correctly', () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({}),
		});
		const dynamicTool = tool.asDynamicTool();
		expect(dynamicTool.description).toEqual('A dummy tool for testing');
	});
	// Well-formed JSON input is parsed and forwarded unchanged.
	it('should parse correct parameters', async () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string().describe('Foo description'),
				bar: z.number().optional(),
			}),
		});
		const dynamicTool = tool.asDynamicTool();
		const testParameters = { foo: 'some value' };
		await dynamicTool.func(JSON.stringify(testParameters));
		expect(func).toHaveBeenCalledWith(testParameters);
	});
	// A bare string is wrapped as the single expected parameter.
	it('should recover when 1 parameter is passed directly', async () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string().describe('Foo description'),
			}),
		});
		const dynamicTool = tool.asDynamicTool();
		const testParameter = 'some value';
		await dynamicTool.func(testParameter);
		expect(func).toHaveBeenCalledWith({ foo: testParameter });
	});
	// Relaxed JSON parsing accepts JS object notation (unquoted keys).
	it('should recover when JS object is passed instead of JSON', async () => {
		const func = jest.fn();
		const ctx = createMockExecuteFunction<ISupplyDataFunctions>({}, mockNode);
		const tool = new N8nTool(ctx, {
			name: 'Dummy Tool',
			description: 'A dummy tool for testing',
			func,
			schema: z.object({
				foo: z.string().describe('Foo description'),
			}),
		});
		const dynamicTool = tool.asDynamicTool();
		await dynamicTool.func('{ foo: "some value" }');
		expect(func).toHaveBeenCalledWith({ foo: 'some value' });
	});
});

View File

@@ -1,156 +0,0 @@
import type { DisplayCondition, INodeProperties, NodeParameterValue } from 'n8n-workflow';
// Selector that decides how the tool's input schema is specified:
// generated from an example JSON object, or written as raw JSON Schema.
export const schemaTypeField: INodeProperties = {
	displayName: 'Schema Type',
	name: 'schemaType',
	type: 'options',
	noDataExpression: true,
	options: [
		{
			name: 'Generate From JSON Example',
			value: 'fromJson',
			description: 'Generate a schema from an example JSON object',
		},
		{
			name: 'Define using JSON Schema',
			value: 'manual',
			description: 'Define the JSON schema manually',
		},
	],
	default: 'fromJson',
	description: 'How to specify the schema for the function',
};
/**
 * Builds the 'JSON Example' field from which a schema is generated.
 * Shown only when the 'fromJson' schema type is selected; callers may merge
 * additional display conditions via `showExtraProps`.
 */
export const buildJsonSchemaExampleField = (props?: {
	showExtraProps?: Record<string, Array<NodeParameterValue | DisplayCondition> | undefined>;
}): INodeProperties => {
	const defaultExample = `{
	"some_input": "some_value"
}`;
	return {
		displayName: 'JSON Example',
		name: 'jsonSchemaExample',
		type: 'json',
		default: defaultExample,
		noDataExpression: true,
		typeOptions: { rows: 10 },
		displayOptions: {
			show: {
				...props?.showExtraProps,
				schemaType: ['fromJson'],
			},
		},
		description: 'Example JSON object to use to generate the schema',
	};
};
/**
 * Builds the notice shown next to the JSON example field, warning that all
 * generated properties are required. Visible only for the 'fromJson' schema
 * type; `showExtraProps` merges in further display conditions.
 */
export const buildJsonSchemaExampleNotice = (props?: {
	showExtraProps?: Record<string, Array<NodeParameterValue | DisplayCondition> | undefined>;
}): INodeProperties => {
	const show = {
		...props?.showExtraProps,
		schemaType: ['fromJson'],
	};
	return {
		displayName:
			"All properties will be required. To make them optional, use the 'JSON Schema' schema type instead",
		name: 'notice',
		type: 'notice',
		default: '',
		displayOptions: { show },
	};
};
// Default instance with no extra display conditions.
export const jsonSchemaExampleField = buildJsonSchemaExampleField();
/**
 * Builds the manual 'Input Schema' field (raw JSON Schema). Shown only for
 * the 'manual' schema type; expressions are allowed (noDataExpression:
 * false). `showExtraProps` merges in further display conditions.
 */
export const buildInputSchemaField = (props?: {
	showExtraProps?: Record<string, Array<NodeParameterValue | DisplayCondition> | undefined>;
}): INodeProperties => {
	const defaultSchema = `{
	"type": "object",
	"properties": {
		"some_input": {
			"type": "string",
			"description": "Some input to the function"
		}
	}
}`;
	return {
		displayName: 'Input Schema',
		name: 'inputSchema',
		type: 'json',
		default: defaultSchema,
		noDataExpression: false,
		typeOptions: { rows: 10 },
		displayOptions: {
			show: {
				...props?.showExtraProps,
				schemaType: ['manual'],
			},
		},
		description: 'Schema to use for the function',
		hint: 'Use <a target="_blank" href="https://json-schema.org/">JSON Schema</a> format (<a target="_blank" href="https://json-schema.org/learn/miscellaneous-examples.html">examples</a>). $refs syntax is currently not supported.',
	};
};
// Default instance with no extra display conditions.
export const inputSchemaField = buildInputSchemaField();
// Selector for where the user message comes from: the connected Chat
// Trigger's 'chatInput' field, or a value defined on this node.
export const promptTypeOptions: INodeProperties = {
	displayName: 'Source for Prompt (User Message)',
	name: 'promptType',
	type: 'options',
	options: [
		{
			name: 'Connected Chat Trigger Node',
			value: 'auto',
			description:
				"Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger",
		},
		{
			name: 'Define below',
			value: 'define',
			description: 'Use an expression to reference data in previous nodes or enter static text',
		},
	],
	default: 'auto',
};
// Free-text prompt field used when promptType is 'define'.
export const textInput: INodeProperties = {
	displayName: 'Prompt (User Message)',
	name: 'text',
	type: 'string',
	required: true,
	default: '',
	placeholder: 'e.g. Hello, how can you help me?',
	typeOptions: {
		rows: 2,
	},
};
// Prompt field pre-wired to the previous node's chatInput; shown disabled
// when promptType is 'auto'.
export const textFromPreviousNode: INodeProperties = {
	displayName: 'Prompt (User Message)',
	name: 'text',
	type: 'string',
	required: true,
	default: '={{ $json.chatInput }}',
	typeOptions: {
		rows: 2,
	},
	disabledOptions: { show: { promptType: ['auto'] } },
};
// Description the LLM sees when deciding whether to invoke this tool.
export const toolDescription: INodeProperties = {
	displayName: 'Description',
	name: 'toolDescription',
	type: 'string',
	default: 'AI Agent that can call other tools',
	required: true,
	typeOptions: { rows: 2 },
	description:
		'Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often',
};

View File

@@ -1,120 +0,0 @@
import { buildInputSchemaField } from './descriptions';
// Pins the exact shape of the field produced by buildInputSchemaField,
// including showExtraProps merging and edge cases.
describe('buildInputSchemaField', () => {
	it('should create input schema field with noDataExpression set to false', () => {
		const result = buildInputSchemaField();
		expect(result.noDataExpression).toBe(false);
		expect(result.displayName).toBe('Input Schema');
		expect(result.name).toBe('inputSchema');
		expect(result.type).toBe('json');
	});
	it('should include typeOptions with rows set to 10', () => {
		const result = buildInputSchemaField();
		expect(result.typeOptions).toEqual({ rows: 10 });
	});
	// Guards the exact default JSON Schema text, whitespace included.
	it('should have correct default JSON schema', () => {
		const result = buildInputSchemaField();
		const expectedDefault = `{
	"type": "object",
	"properties": {
		"some_input": {
			"type": "string",
			"description": "Some input to the function"
		}
	}
}`;
		expect(result.default).toBe(expectedDefault);
	});
	it('should include display options with schemaType manual', () => {
		const result = buildInputSchemaField();
		expect(result.displayOptions).toEqual({
			show: {
				schemaType: ['manual'],
			},
		});
	});
	// showExtraProps entries must merge alongside the fixed schemaType condition.
	it('should merge showExtraProps when provided', () => {
		const result = buildInputSchemaField({
			showExtraProps: {
				mode: ['advanced'],
				authentication: ['oauth2'],
			},
		});
		expect(result.displayOptions).toEqual({
			show: {
				mode: ['advanced'],
				authentication: ['oauth2'],
				schemaType: ['manual'],
			},
		});
	});
	it('should include description and hint', () => {
		const result = buildInputSchemaField();
		expect(result.description).toBe('Schema to use for the function');
		expect(result.hint).toContain('JSON Schema');
		expect(result.hint).toContain('json-schema.org');
	});
	it('should allow data expressions in the schema field', () => {
		const result = buildInputSchemaField();
		// noDataExpression is false, which means expressions are allowed
		expect(result.noDataExpression).toBe(false);
		// Since noDataExpression is false, this should be valid
		expect(typeof result.default).toBe('string');
		expect(result.noDataExpression).toBe(false);
	});
	it('should be a valid INodeProperties object', () => {
		const result = buildInputSchemaField();
		// Check all required fields for INodeProperties
		expect(result).toHaveProperty('displayName');
		expect(result).toHaveProperty('name');
		expect(result).toHaveProperty('type');
		expect(result).toHaveProperty('default');
		// Verify types
		expect(typeof result.displayName).toBe('string');
		expect(typeof result.name).toBe('string');
		expect(typeof result.type).toBe('string');
		expect(typeof result.default).toBe('string');
	});
	it('should properly handle edge cases with showExtraProps', () => {
		// Empty showExtraProps
		const result1 = buildInputSchemaField({ showExtraProps: {} });
		expect(result1.displayOptions).toEqual({
			show: {
				schemaType: ['manual'],
			},
		});
		// showExtraProps with undefined values
		const result2 = buildInputSchemaField({
			showExtraProps: {
				field1: undefined,
				field2: ['value2'],
			},
		});
		expect(result2.displayOptions).toEqual({
			show: {
				field1: undefined,
				field2: ['value2'],
				schemaType: ['manual'],
			},
		});
	});
});

View File

@@ -1,312 +0,0 @@
import type { BaseChatMessageHistory } from '@langchain/core/chat_history';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import { Toolkit } from 'langchain/agents';
import type { BaseChatMemory } from 'langchain/memory';
import { NodeConnectionTypes, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type {
AiEvent,
IDataObject,
IExecuteFunctions,
ISupplyDataFunctions,
IWebhookFunctions,
} from 'n8n-workflow';
import { N8nTool } from './N8nTool';
/**
 * Runtime duck-typing guard: true when `obj` exposes every listed name as a
 * function. Used to recognize LangChain interfaces without relying on
 * instanceof across package boundaries. (With no names given it returns
 * true, mirroring `Array.every` on an empty list.)
 */
function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): obj is T {
	return methodNames.every((methodName) => {
		if (typeof obj !== 'object' || obj === null) return false;
		const candidate = obj as Record<string | symbol, unknown>;
		return methodName in candidate && typeof candidate[methodName] === 'function';
	});
}
/**
 * Reads metadata filter values from the node's 'options' parameter.
 * Supports two shapes: a 'metadataValues' name/value collection (collapsed
 * into a flat object) or a raw JSON filter under 'searchFilterJson'.
 * Returns undefined when neither is set or the collection is empty.
 */
export function getMetadataFiltersValues(
	ctx: IExecuteFunctions | ISupplyDataFunctions,
	itemIndex: number,
): Record<string, never> | undefined {
	const options = ctx.getNodeParameter('options', itemIndex, {});
	if (options.metadata) {
		// assumes metadataValues is always present when options.metadata is set — TODO confirm
		const { metadataValues: metadata } = options.metadata as {
			metadataValues: Array<{
				name: string;
				value: string;
			}>;
		};
		if (metadata.length > 0) {
			// Collapse the name/value pairs into a single flat object.
			return metadata.reduce((acc, { name, value }) => ({ ...acc, [name]: value }), {});
		}
	}
	if (options.searchFilterJson) {
		return ctx.getNodeParameter('options.searchFilterJson', itemIndex, '', {
			ensureType: 'object',
		}) as Record<string, never>;
	}
	return undefined;
}
// Duck-types a LangChain BaseChatMemory by its two core methods.
export function isBaseChatMemory(obj: unknown) {
	return hasMethods<BaseChatMemory>(obj, 'loadMemoryVariables', 'saveContext');
}
// Duck-types a LangChain BaseChatMessageHistory by its two core methods.
export function isBaseChatMessageHistory(obj: unknown) {
	return hasMethods<BaseChatMessageHistory>(obj, 'getMessages', 'addMessage');
}
/**
 * True when the model's LangChain namespace metadata marks it as a chat
 * model. Checking `lc_namespace` instead of instanceof works across
 * langchain package versions.
 */
export function isChatInstance(model: unknown): model is BaseChatModel {
	return ((model as BaseLLM)?.lc_namespace ?? []).includes('chat_models');
}
/**
 * True when the object's LangChain namespace metadata marks it as a tool.
 * Same version-tolerant metadata check as isChatInstance.
 */
export function isToolsInstance(model: unknown): model is Tool {
	return ((model as Tool)?.lc_namespace ?? []).includes('tools');
}
/**
 * Resolves the prompt text for a node based on its prompt-type parameter:
 * 'auto' pulls `chatInput` from the incoming item (what the Chat Trigger
 * node emits); any other value reads the explicit input parameter.
 * @throws NodeOperationError when no prompt could be resolved
 */
export function getPromptInputByType(options: {
	ctx: IExecuteFunctions | ISupplyDataFunctions;
	i: number;
	promptTypeKey: string;
	inputKey: string;
}) {
	const { ctx, i, promptTypeKey, inputKey } = options;
	const promptType = ctx.getNodeParameter(promptTypeKey, i, 'define') as string;
	let input;
	if (promptType === 'auto') {
		// Mirrors the field name the Chat Trigger node outputs.
		input = ctx.evaluateExpression('{{ $json["chatInput"] }}', i) as string;
	} else {
		input = ctx.getNodeParameter(inputKey, i) as string;
	}
	if (input === undefined) {
		throw new NodeOperationError(ctx.getNode(), 'No prompt specified', {
			description:
				"Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
		});
	}
	return input;
}
/**
 * Resolves the chat session id for memory nodes. In 'fromInput' mode it is
 * taken from the webhook body, the incoming item, or the connected chat
 * trigger (in that order); otherwise it is read from the custom-key
 * parameter.
 * @throws NodeOperationError when no session id could be resolved
 */
export function getSessionId(
	ctx: ISupplyDataFunctions | IWebhookFunctions,
	itemIndex: number,
	selectorKey = 'sessionIdType',
	autoSelect = 'fromInput',
	customKey = 'sessionKey',
) {
	let sessionId = '';
	const selectorType = ctx.getNodeParameter(selectorKey, itemIndex) as string;
	if (selectorType === autoSelect) {
		// If memory node is used in webhook like node(like chat trigger node), it doesn't have access to evaluateExpression
		// so we try to extract sessionId from the bodyData
		if ('getBodyData' in ctx) {
			const bodyData = ctx.getBodyData() ?? {};
			sessionId = bodyData.sessionId as string;
		} else {
			sessionId = ctx.evaluateExpression('{{ $json.sessionId }}', itemIndex) as string;
			// try to get sessionId from chat trigger
			if (!sessionId || sessionId === undefined) {
				try {
					const chatTrigger = ctx.getChatTrigger();
					if (chatTrigger) {
						sessionId = ctx.evaluateExpression(
							`{{ $('${chatTrigger.name}').first().json.sessionId }}`,
							itemIndex,
						) as string;
					}
					// Best-effort lookup: a missing/disconnected chat trigger falls
					// through to the "no session ID found" error below.
				} catch (error) {}
			}
		}
		if (sessionId === '' || sessionId === undefined) {
			throw new NodeOperationError(ctx.getNode(), 'No session ID found', {
				description:
					"Expected to find the session ID in an input field called 'sessionId' (this is what the chat trigger node outputs). To use something else, change the 'Session ID' parameter",
				itemIndex,
			});
		}
	} else {
		// Custom-key mode: the user supplies the session id directly.
		sessionId = ctx.getNodeParameter(customKey, itemIndex, '') as string;
		if (sessionId === '' || sessionId === undefined) {
			throw new NodeOperationError(ctx.getNode(), 'Key parameter is empty', {
				description:
					"Provide a key to use as session ID in the 'Key' parameter or use the 'Connected Chat Trigger Node' option to use the session ID from your Chat Trigger",
				itemIndex,
			});
		}
	}
	return sessionId;
}
/**
 * Emits an AI telemetry event, serializing the payload to JSON. Logging is
 * best-effort: failures are downgraded to a debug log so they never break
 * node execution.
 */
export function logAiEvent(
	executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
	event: AiEvent,
	data?: IDataObject,
) {
	try {
		executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined);
	} catch (error) {
		executeFunctions.logger.debug(`Error logging AI event: ${event}`);
	}
}
/**
 * Renders a chat history as plain text, one line per message: human
 * messages are prefixed with "Human: ", AI messages with "Assistant: ",
 * and any other message type is rendered as its bare content.
 */
export function serializeChatHistory(chatHistory: BaseMessage[]): string {
	const lines: string[] = [];
	for (const chatMessage of chatHistory) {
		const messageType = chatMessage._getType();
		if (messageType === 'human') {
			lines.push(`Human: ${chatMessage.content}`);
		} else if (messageType === 'ai') {
			lines.push(`Assistant: ${chatMessage.content}`);
		} else {
			lines.push(`${chatMessage.content}`);
		}
	}
	return lines.join('\n');
}
/**
 * Doubles lone curly brackets so the text can be embedded in a LangChain
 * prompt template without being treated as a template variable. Existing
 * `{{`/`}}` pairs are left untouched; triple runs become quadruple runs.
 * Returns undefined when given undefined.
 */
export function escapeSingleCurlyBrackets(text?: string): string | undefined {
	if (text === undefined) return undefined;
	return (
		text
			// Triple brackets go first so the single-bracket passes below don't
			// misread their edges.
			.replace(/(?<!{){{{(?!{)/g, '{{{{')
			.replace(/(?<!})}}}(?!})/g, '}}}}')
			// A lone { (not adjacent to another {) becomes {{ …
			.replace(/(?<!{){(?!{)/g, '{{')
			// … and a lone } becomes }}.
			.replace(/(?<!})}(?!})/g, '}}')
	);
}
/**
 * Collects all tools connected to the node's AiTool input, expanding
 * toolkits into their member tools. Optionally enforces unique tool names,
 * escapes lone curly brackets in descriptions (for prompt-template safety)
 * and converts N8nTool instances to plain DynamicTools.
 * @throws NodeOperationError on duplicate tool names when enforceUniqueNames is set
 */
export const getConnectedTools = async (
	ctx: IExecuteFunctions | IWebhookFunctions | ISupplyDataFunctions,
	enforceUniqueNames: boolean,
	convertStructuredTool: boolean = true,
	escapeCurlyBrackets: boolean = false,
) => {
	const connectedTools = (
		((await ctx.getInputConnectionData(NodeConnectionTypes.AiTool, 0)) as Array<Toolkit | Tool>) ??
		[]
	).flatMap((toolOrToolkit) => {
		// Toolkits contribute each of their member tools individually.
		if (toolOrToolkit instanceof Toolkit) {
			return toolOrToolkit.getTools() as Tool[];
		}
		return toolOrToolkit;
	});
	// Without name enforcement the raw list is returned unmodified —
	// escaping/conversion below only applies on the deduplicating path.
	if (!enforceUniqueNames) return connectedTools;
	const seenNames = new Set<string>();
	const finalTools: Tool[] = [];
	for (const tool of connectedTools) {
		const { name } = tool;
		if (seenNames.has(name)) {
			throw new NodeOperationError(
				ctx.getNode(),
				`You have multiple tools with the same name: '${name}', please rename them to avoid conflicts`,
			);
		}
		seenNames.add(name);
		if (escapeCurlyBrackets) {
			// Mutates the tool's description in place.
			tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description;
		}
		if (convertStructuredTool && tool instanceof N8nTool) {
			finalTools.push(tool.asDynamicTool());
		} else {
			finalTools.push(tool);
		}
	}
	return finalTools;
};
/**
 * Sometimes model output is wrapped in an additional object property.
 * Unwraps one level of `{ output: { output: {...} } }` — only when 'output'
 * is the sole key at both levels — and returns the input untouched in every
 * other case.
 */
export function unwrapNestedOutput(output: Record<string, unknown>): Record<string, unknown> {
	if (!('output' in output) || Object.keys(output).length !== 1) {
		return output;
	}
	const inner = output.output;
	if (typeof inner !== 'object' || inner === null) {
		return output;
	}
	if (!('output' in inner) || Object.keys(inner).length !== 1) {
		return output;
	}
	return inner as Record<string, unknown>;
}
/**
 * Detects whether `text` contains a single character repeated sequentially at
 * least `threshold` times. Used to guard tiktoken against pathological,
 * highly repetitive content.
 *
 * @param text The text to check
 * @param threshold Minimum run length to report (default: 1000)
 * @returns true when such a run exists, false otherwise (or on bad input)
 */
export function hasLongSequentialRepeat(text: string, threshold = 1000): boolean {
  try {
    const inputIsUnusable =
      text === null ||
      typeof text !== 'string' ||
      text.length === 0 ||
      threshold <= 0 ||
      text.length < threshold;
    if (inputIsUnusable) {
      return false;
    }

    // for..of walks the string's iterator (code points), so no array copy of
    // the text is made and surrogate pairs count as one character.
    let previousChar: string | undefined;
    let runLength = 0;
    for (const char of text) {
      if (char === previousChar) {
        runLength += 1;
        if (runLength >= threshold) {
          return true;
        }
      } else {
        runLength = 1;
        previousChar = char;
      }
    }
    return false;
  } catch {
    // Defensive: never let this guard break normal processing.
    return false;
  }
}

View File

@@ -1,19 +0,0 @@
import proxyFromEnv from 'proxy-from-env';
import { ProxyAgent } from 'undici';
/**
 * Builds a ProxyAgent from the proxy environment variables, or returns
 * undefined when no proxy applies.
 *
 * When no target URL is known (e.g. an agent must be handed to ChatAwsBedrock
 * up front), a dummy URL is used; NO_PROXY is then effectively ignored, which
 * is preferable to skipping the proxy entirely.
 */
export function getProxyAgent(targetUrl?: string) {
  const effectiveUrl = targetUrl ?? 'https://example.nonexistent/';
  const proxyUrl = proxyFromEnv.getProxyForUrl(effectiveUrl);
  return proxyUrl ? new ProxyAgent(proxyUrl) : undefined;
}

View File

@@ -1,472 +0,0 @@
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import type { BaseCallbackConfig, Callbacks } from '@langchain/core/callbacks/manager';
import type { BaseChatMessageHistory } from '@langchain/core/chat_history';
import type { Document } from '@langchain/core/documents';
import { Embeddings } from '@langchain/core/embeddings';
import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory';
import type { BaseMessage } from '@langchain/core/messages';
import { BaseRetriever } from '@langchain/core/retrievers';
import { BaseDocumentCompressor } from '@langchain/core/retrievers/document_compressors';
import type { StructuredTool, Tool } from '@langchain/core/tools';
import { VectorStore } from '@langchain/core/vectorstores';
import { TextSplitter } from '@langchain/textsplitters';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import type {
IDataObject,
IExecuteFunctions,
INodeExecutionData,
ISupplyDataFunctions,
ITaskMetadata,
NodeConnectionType,
} from 'n8n-workflow';
import {
NodeOperationError,
NodeConnectionTypes,
parseErrorMetadata,
deepCopy,
} from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { N8nJsonLoader } from './N8nJsonLoader';
/**
 * Invokes an async method of a connected sub-node with `this` bound.
 * On failure the error is wrapped in a NodeOperationError, logged on the
 * sub-node's output (with parsed metadata) so it appears in the execution
 * log, and then rethrown.
 */
export async function callMethodAsync<T>(
  this: T,
  parameters: {
    executeFunctions: IExecuteFunctions | ISupplyDataFunctions;
    connectionType: NodeConnectionType;
    currentNodeRunIndex: number;
    method: (...args: any[]) => Promise<unknown>;
    arguments: unknown[];
  },
): Promise<unknown> {
  const { executeFunctions, connectionType, currentNodeRunIndex } = parameters;
  try {
    return await parameters.method.call(this, ...parameters.arguments);
  } catch (e) {
    const connectedNode = executeFunctions.getNode();
    const error = new NodeOperationError(connectedNode, e, {
      functionality: 'configuration-node',
    });

    // Attach the failure to the sub-node's output before rethrowing.
    executeFunctions.addOutputData(
      connectionType,
      currentNodeRunIndex,
      error,
      parseErrorMetadata(error),
    );

    if (error.message) {
      error.description = error.description || error.message;
      throw error;
    }

    // The wrapped error carried no message: fall back to a generic one.
    throw new NodeOperationError(
      connectedNode,
      `Error on node "${connectedNode.name}" which is connected via input "${connectionType}"`,
      { functionality: 'configuration-node' },
    );
  }
}
/**
 * Synchronous counterpart of callMethodAsync: invokes a method of a connected
 * sub-node with `this` bound. On failure the error is logged on the
 * sub-node's output, then a generic NodeOperationError naming the connection
 * is thrown.
 */
export function callMethodSync<T>(
  this: T,
  parameters: {
    executeFunctions: IExecuteFunctions;
    connectionType: NodeConnectionType;
    currentNodeRunIndex: number;
    method: (...args: any[]) => T;
    arguments: unknown[];
  },
): unknown {
  const { executeFunctions, connectionType, currentNodeRunIndex } = parameters;
  try {
    return parameters.method.call(this, ...parameters.arguments);
  } catch (e) {
    const connectedNode = executeFunctions.getNode();
    // Surface the original failure on the sub-node's output first.
    executeFunctions.addOutputData(
      connectionType,
      currentNodeRunIndex,
      new NodeOperationError(connectedNode, e),
    );
    throw new NodeOperationError(
      connectedNode,
      `Error on node "${connectedNode.name}" which is connected via input "${connectionType}"`,
      { functionality: 'configuration-node' },
    );
  }
}
/**
 * Wraps a LangChain-related instance in a Proxy that intercepts its key
 * methods (memory load/save, message get/add, document retrieval, embedding,
 * reranking, loading, splitting, tool calls, vector search) in order to:
 *  - log each call's input and output on the n8n execution via
 *    addInputData/addOutputData,
 *  - emit AI telemetry events via logAiEvent, and
 *  - route errors through callMethodAsync so they are attributed to the
 *    connected sub-node.
 * Every property access that is not explicitly matched below falls through to
 * the wrapped instance unchanged.
 */
export function logWrapper<
  T extends
    | Tool
    | StructuredTool
    | BaseChatMemory
    | BaseChatMessageHistory
    | BaseRetriever
    | BaseDocumentCompressor
    | Embeddings
    | Document[]
    | Document
    | BaseDocumentLoader
    | TextSplitter
    | VectorStore
    | N8nBinaryLoader
    | N8nJsonLoader,
>(originalInstance: T, executeFunctions: IExecuteFunctions | ISupplyDataFunctions): T {
  return new Proxy(originalInstance, {
    get: (target, prop) => {
      let connectionType: NodeConnectionType | undefined;
      // ========== BaseChatMemory ==========
      if (isBaseChatMemory(originalInstance)) {
        if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {
          return async (values: InputValues): Promise<MemoryVariables> => {
            connectionType = NodeConnectionTypes.AiMemory;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { action: 'loadMemoryVariables', values } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [values],
            })) as MemoryVariables;

            // Prefer the chat_history key when present; fall back to the raw response.
            const chatHistory = (response?.chat_history as BaseMessage[]) ?? response;

            executeFunctions.addOutputData(connectionType, index, [
              [{ json: { action: 'loadMemoryVariables', chatHistory } }],
            ]);
            return response;
          };
        } else if (prop === 'saveContext' && 'saveContext' in target) {
          return async (input: InputValues, output: OutputValues): Promise<MemoryVariables> => {
            connectionType = NodeConnectionTypes.AiMemory;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { action: 'saveContext', input, output } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [input, output],
            })) as MemoryVariables;

            // Log the full post-save history, not just the saved turn.
            const chatHistory = await target.chatHistory.getMessages();

            executeFunctions.addOutputData(connectionType, index, [
              [{ json: { action: 'saveContext', chatHistory } }],
            ]);
            return response;
          };
        }
      }

      // ========== BaseChatMessageHistory ==========
      if (isBaseChatMessageHistory(originalInstance)) {
        if (prop === 'getMessages' && 'getMessages' in target) {
          return async (): Promise<BaseMessage[]> => {
            connectionType = NodeConnectionTypes.AiMemory;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { action: 'getMessages' } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [],
            })) as BaseMessage[];

            const payload = { action: 'getMessages', response };
            executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);

            logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
            return response;
          };
        } else if (prop === 'addMessage' && 'addMessage' in target) {
          return async (message: BaseMessage): Promise<void> => {
            connectionType = NodeConnectionTypes.AiMemory;
            const payload = { action: 'addMessage', message };

            const { index } = executeFunctions.addInputData(connectionType, [[{ json: payload }]]);

            await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [message],
            });

            logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
            executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
          };
        }
      }

      // ========== BaseRetriever ==========
      if (originalInstance instanceof BaseRetriever) {
        if (prop === 'getRelevantDocuments' && 'getRelevantDocuments' in target) {
          return async (
            query: string,
            config?: Callbacks | BaseCallbackConfig,
          ): Promise<Document[]> => {
            connectionType = NodeConnectionTypes.AiRetriever;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { query, config } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [query, config],
            })) as Array<Document<Record<string, any>>>;

            // When a sub-workflow produced the documents, link its execution
            // in the output metadata (ids are read from the first document).
            const executionId: string | undefined = response[0]?.metadata?.executionId as string;
            const workflowId: string | undefined = response[0]?.metadata?.workflowId as string;

            const metadata: ITaskMetadata = {};
            if (executionId && workflowId) {
              metadata.subExecution = {
                executionId,
                workflowId,
              };
            }

            logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
            executeFunctions.addOutputData(
              connectionType,
              index,
              [[{ json: { response } }]],
              metadata,
            );
            return response;
          };
        }
      }

      // ========== Embeddings ==========
      if (originalInstance instanceof Embeddings) {
        // Docs -> Embeddings
        if (prop === 'embedDocuments' && 'embedDocuments' in target) {
          return async (documents: string[]): Promise<number[][]> => {
            connectionType = NodeConnectionTypes.AiEmbedding;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { documents } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [documents],
            })) as number[][];

            logAiEvent(executeFunctions, 'ai-document-embedded');
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
            return response;
          };
        }
        // Query -> Embeddings
        if (prop === 'embedQuery' && 'embedQuery' in target) {
          return async (query: string): Promise<number[]> => {
            connectionType = NodeConnectionTypes.AiEmbedding;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { query } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [query],
            })) as number[];

            logAiEvent(executeFunctions, 'ai-query-embedded');
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
            return response;
          };
        }
      }

      // ========== Rerankers ==========
      if (originalInstance instanceof BaseDocumentCompressor) {
        if (prop === 'compressDocuments' && 'compressDocuments' in target) {
          return async (documents: Document[], query: string): Promise<Document[]> => {
            connectionType = NodeConnectionTypes.AiReranker;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { query, documents } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              // compressDocuments mutates the original object
              // messing up the input data logging
              arguments: [deepCopy(documents), query],
            })) as Document[];

            logAiEvent(executeFunctions, 'ai-document-reranked', { query });
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
            return response;
          };
        }
      }

      // ========== N8n Loaders Process All ==========
      if (
        originalInstance instanceof N8nJsonLoader ||
        originalInstance instanceof N8nBinaryLoader
      ) {
        // Process All
        if (prop === 'processAll' && 'processAll' in target) {
          return async (items: INodeExecutionData[]): Promise<number[]> => {
            connectionType = NodeConnectionTypes.AiDocument;

            const { index } = executeFunctions.addInputData(connectionType, [items]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [items],
            })) as number[];

            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
            return response;
          };
        }
        // Process Each
        if (prop === 'processItem' && 'processItem' in target) {
          return async (item: INodeExecutionData, itemIndex: number): Promise<number[]> => {
            connectionType = NodeConnectionTypes.AiDocument;

            const { index } = executeFunctions.addInputData(connectionType, [[item]]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [item, itemIndex],
            })) as number[];

            logAiEvent(executeFunctions, 'ai-document-processed');
            // pairedItem links the logged output back to the source item.
            executeFunctions.addOutputData(connectionType, index, [
              [{ json: { response }, pairedItem: { item: itemIndex } }],
            ]);
            return response;
          };
        }
      }

      // ========== TextSplitter ==========
      if (originalInstance instanceof TextSplitter) {
        if (prop === 'splitText' && 'splitText' in target) {
          return async (text: string): Promise<string[]> => {
            connectionType = NodeConnectionTypes.AiTextSplitter;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { textSplitter: text } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [text],
            })) as string[];

            logAiEvent(executeFunctions, 'ai-text-split');
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
            return response;
          };
        }
      }

      // ========== Tool ==========
      if (isToolsInstance(originalInstance)) {
        if (prop === '_call' && '_call' in target) {
          return async (query: string): Promise<string> => {
            connectionType = NodeConnectionTypes.AiTool;
            const inputData: IDataObject = { query };

            // Toolkit members also log which tool of the kit was invoked.
            if (target.metadata?.isFromToolkit) {
              inputData.tool = {
                name: target.name,
                description: target.description,
              };
            }

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: inputData }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [query],
            })) as string;

            logAiEvent(executeFunctions, 'ai-tool-called', { ...inputData, response });
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

            // Tools must return a string; serialize anything else.
            if (typeof response === 'string') return response;
            return JSON.stringify(response);
          };
        }
      }

      // ========== VectorStore ==========
      if (originalInstance instanceof VectorStore) {
        if (prop === 'similaritySearch' && 'similaritySearch' in target) {
          return async (
            query: string,
            k?: number,
            // NOTE(review): BiquadFilterType is a DOM Web Audio API type — this
            // looks like an accidental auto-import; presumably the vector
            // store's own filter type was intended. TODO confirm.
            filter?: BiquadFilterType,
            _callbacks?: Callbacks,
          ): Promise<Document[]> => {
            connectionType = NodeConnectionTypes.AiVectorStore;

            const { index } = executeFunctions.addInputData(connectionType, [
              [{ json: { query, k, filter } }],
            ]);

            const response = (await callMethodAsync.call(target, {
              executeFunctions,
              connectionType,
              currentNodeRunIndex: index,
              method: target[prop],
              arguments: [query, k, filter, _callbacks],
            })) as Array<Document<Record<string, any>>>;

            logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
            executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

            return response;
          };
        }
      }

      // Anything not intercepted above is forwarded to the wrapped instance.
      // eslint-disable-next-line @typescript-eslint/no-unsafe-return
      return (target as any)[prop];
    },
  });
}

View File

@@ -1,63 +0,0 @@
import { jsonSchemaToZod } from '@n8n/json-schema-to-zod';
import { json as generateJsonSchema } from 'generate-schema';
import type { SchemaObject } from 'generate-schema';
import type { JSONSchema7 } from 'json-schema';
import type { IExecuteFunctions } from 'n8n-workflow';
import { NodeOperationError, jsonParse } from 'n8n-workflow';
import type { z } from 'zod';
/**
 * Returns a copy of `schema` in which every declared property — at every
 * nesting level, including array item schemas — is listed as required.
 *
 * The input is deep-copied before the recursive walk, so the caller's schema
 * object is never mutated (the previous implementation modified it in place).
 * Schemas here are plain JSON (parsed input run through generate-schema), so
 * a JSON round-trip is a safe deep copy.
 */
function makeAllPropertiesRequired(schema: JSONSchema7): JSONSchema7 {
  // Narrow to sub-schemas we can recurse into: objects carrying a 'type'.
  function isPropertySchema(property: unknown): property is JSONSchema7 {
    return typeof property === 'object' && property !== null && 'type' in property;
  }

  // Recursive worker; mutates only the private copy created below.
  function markRequired(node: JSONSchema7): JSONSchema7 {
    // Object schemas: declare every property required, then recurse.
    if (node.type === 'object' && node.properties) {
      const propertyNames = Object.keys(node.properties);
      if (propertyNames.length > 0) {
        node.required = propertyNames;
      }
      for (const name of propertyNames) {
        const child = node.properties[name];
        if (isPropertySchema(child)) {
          markRequired(child);
        }
      }
    }

    // Array schemas: recurse into the item schema.
    if (node.type === 'array' && node.items && isPropertySchema(node.items)) {
      node.items = markRequired(node.items);
    }

    return node;
  }

  return markRequired(JSON.parse(JSON.stringify(schema)) as JSONSchema7);
}
/**
 * Infers a JSON Schema from an example JSON string.
 *
 * @param exampleJsonString example payload to derive the schema from
 * @param allFieldsRequired when true, every inferred property is marked required
 */
export function generateSchemaFromExample(
  exampleJsonString: string,
  allFieldsRequired = false,
): JSONSchema7 {
  const example = jsonParse<SchemaObject>(exampleJsonString);
  const inferred = generateJsonSchema(example) as JSONSchema7;
  return allFieldsRequired ? makeAllPropertiesRequired(inferred) : inferred;
}
/** Converts a JSON Schema definition into the equivalent zod schema. */
export function convertJsonSchemaToZod<T extends z.ZodTypeAny = z.ZodTypeAny>(schema: JSONSchema7) {
  const zodSchema = jsonSchemaToZod<T>(schema);
  return zodSchema;
}
/**
 * Rethrows tool-schema mismatch errors with a user-facing explanation; any
 * other error is ignored (the caller decides how to handle it).
 */
export function throwIfToolSchema(ctx: IExecuteFunctions, error: Error) {
  const isSchemaMismatch = error?.message?.includes('tool input did not match expected schema');
  if (!isSchemaMismatch) return;

  throw new NodeOperationError(
    ctx.getNode(),
    `${error.message}.
This is most likely because some of your tools are configured to require a specific schema. This is not supported by Conversational Agent. Remove the schema from the tool configuration or use Tools agent instead.`,
  );
}

View File

@@ -1,187 +0,0 @@
import { NodeConnectionTypes } from 'n8n-workflow';
import type { IDisplayOptions, INodeProperties } from 'n8n-workflow';
// Reusable 'Metadata Filter' field: a growable fixedCollection of name/value
// pairs that nodes use to filter documents by their metadata.
export const metadataFilterField: INodeProperties = {
  displayName: 'Metadata Filter',
  name: 'metadata',
  type: 'fixedCollection',
  description: 'Metadata to filter the document by',
  typeOptions: {
    // Users may add any number of filter pairs.
    multipleValues: true,
  },
  default: {},
  placeholder: 'Add filter field',
  options: [
    {
      name: 'metadataValues',
      displayName: 'Fields to Set',
      values: [
        {
          displayName: 'Name',
          name: 'name',
          type: 'string',
          default: '',
          required: true,
        },
        {
          displayName: 'Value',
          name: 'value',
          type: 'string',
          default: '',
        },
      ],
    },
  ],
};
/**
 * Builds a notice field linking to a workflow template that demonstrates the
 * node.
 *
 * @param templateId id of the template the notice links to
 */
export function getTemplateNoticeField(templateId: number): INodeProperties {
  const exampleLink = `<a href="/templates/${templateId}" target="_blank">example</a>`;
  return {
    displayName: `Save time with an ${exampleLink} of how this node works`,
    name: 'notice',
    type: 'notice',
    default: '',
  };
}
/**
 * Builds the 'Batch Processing' collection (batch size + inter-batch delay)
 * used by nodes for rate limiting.
 *
 * @param displayOptions optional display conditions for the collection
 * @param defaultBatchSize default value of the 'Batch Size' option
 */
export function getBatchingOptionFields(
  displayOptions: IDisplayOptions | undefined,
  defaultBatchSize: number = 5,
): INodeProperties {
  const options = [
    {
      displayName: 'Batch Size',
      name: 'batchSize',
      default: defaultBatchSize,
      type: 'number',
      description:
        'How many items to process in parallel. This is useful for rate limiting, but might impact the log output ordering.',
    },
    {
      displayName: 'Delay Between Batches',
      name: 'delayBetweenBatches',
      default: 0,
      type: 'number',
      description: 'Delay in milliseconds between batches. This is useful for rate limiting.',
    },
  ];

  return {
    displayName: 'Batch Processing',
    name: 'batching',
    type: 'collection',
    placeholder: 'Add Batch Processing Option',
    description: 'Batch processing options for rate limiting',
    default: {},
    options,
    displayOptions,
  };
}
// Maps each supported connection type to its node-creator link target
// (connection: '' means "open the root AI creator view") and the
// human-readable label used in connection-hint notices.
const connectionsString = {
  [NodeConnectionTypes.AiAgent]: {
    // Root AI view
    connection: '',
    locale: 'AI Agent',
  },
  [NodeConnectionTypes.AiChain]: {
    // Root AI view
    connection: '',
    locale: 'AI Chain',
  },
  [NodeConnectionTypes.AiDocument]: {
    connection: NodeConnectionTypes.AiDocument,
    locale: 'Document Loader',
  },
  [NodeConnectionTypes.AiVectorStore]: {
    connection: NodeConnectionTypes.AiVectorStore,
    locale: 'Vector Store',
  },
  [NodeConnectionTypes.AiRetriever]: {
    connection: NodeConnectionTypes.AiRetriever,
    locale: 'Vector Store Retriever',
  },
};
// Connection types getConnectionHintNoticeField can render a hint for; each
// member must have a corresponding entry in connectionsString above.
type AllowedConnectionTypes =
  | typeof NodeConnectionTypes.AiAgent
  | typeof NodeConnectionTypes.AiChain
  | typeof NodeConnectionTypes.AiDocument
  | typeof NodeConnectionTypes.AiVectorStore
  | typeof NodeConnectionTypes.AiRetriever;
/** Picks the indefinite article ('a' or 'an') for the word that follows. */
function determineArticle(nextWord: string): string {
  // A leading vowel letter approximates a vowel sound.
  return /^[aeiouAEIOU]/.test(nextWord) ? 'an' : 'a';
}
// Builds the data-action parameter for a node-creator link. An empty
// connection type means "open the root AI creator view" instead of
// pre-filtering by connection type.
const getConnectionParameterString = (connectionType: string) => {
  if (connectionType === '') return "data-action-parameter-creatorview='AI'";
  return `data-action-parameter-connectiontype='${connectionType}'`;
};

// Renders the anchor that opens the selective node creator for a connection.
// Fixed two HTML defects in the template: a missing space between the
// data-action attribute and the parameter attribute, and a stray apostrophe
// after the parameter string (compare the correct inline anchor built in
// getConnectionHintNoticeField).
const getAhref = (connectionType: { connection: string; locale: string }) =>
  `<a class="test" data-action='openSelectiveNodeCreator' ${getConnectionParameterString(
    connectionType.connection,
  )}>${connectionType.locale}</a>`;
/**
 * Builds the 'notice' field shown when a node needs one of the given
 * sub-node connections, with anchor(s) that open the node creator
 * pre-filtered to the right connection type.
 */
export function getConnectionHintNoticeField(
  connectionTypes: AllowedConnectionTypes[],
): INodeProperties {
  const groupedConnections = new Map<string, string[]>();

  // group connection types by their 'connection' value
  // to not create multiple links
  connectionTypes.forEach((connectionType) => {
    const connectionString = connectionsString[connectionType].connection;
    const localeString = connectionsString[connectionType].locale;
    if (!groupedConnections.has(connectionString)) {
      groupedConnections.set(connectionString, [localeString]);
      return;
    }

    groupedConnections.get(connectionString)?.push(localeString);
  });

  let displayName;

  if (groupedConnections.size === 1) {
    // Single creator target: one inline sentence with an 'Insert one' link.
    const [[connection, locales]] = Array.from(groupedConnections);

    displayName = `This node must be connected to ${determineArticle(locales[0])} ${locales[0]
      .toLowerCase()
      .replace(
        /^ai /,
        'AI ',
      )}. <a data-action='openSelectiveNodeCreator' ${getConnectionParameterString(
      connection,
    )}>Insert one</a>`;
  } else {
    // Multiple creator targets: one anchor per target, joined with ' or '.
    const ahrefs = Array.from(groupedConnections, ([connection, locales]) => {
      // If there are multiple locales, join them with ' or '
      // use determineArticle to insert the correct article
      const locale =
        locales.length > 1
          ? locales
              .map((localeString, index, { length }) => {
                return (
                  (index === 0 ? `${determineArticle(localeString)} ` : '') +
                  (index < length - 1 ? `${localeString} or ` : localeString)
                );
              })
              .join('')
          : `${determineArticle(locales[0])} ${locales[0]}`;
      return getAhref({ connection, locale });
    });

    displayName = `This node needs to be connected to ${ahrefs.join(' or ')}.`;
  }

  return {
    displayName,
    name: 'notice',
    type: 'notice',
    default: '',
    typeOptions: {
      // Styles the notice as a connection hint in the node details view.
      containerClass: 'ndv-connection-hint-notice',
    },
  };
}

View File

@@ -1,26 +0,0 @@
import type { BaseCallbackConfig } from '@langchain/core/callbacks/manager';
import type { IExecuteFunctions } from 'n8n-workflow';
// Options for getTracingConfig.
interface TracingConfig {
  // Extra key/value pairs merged into the callback metadata.
  additionalMetadata?: Record<string, unknown>;
}
/**
 * Builds the LangChain callback config used to trace a node's run: a
 * human-readable run name, execution metadata, and the parent callback
 * manager (when the context provides one).
 *
 * @param context execution context of the node being traced
 * @param config optional extras merged into the metadata
 */
export function getTracingConfig(
  context: IExecuteFunctions,
  config: TracingConfig = {},
): BaseCallbackConfig {
  const callbacks = context.getParentCallbackManager
    ? context.getParentCallbackManager()
    : undefined;
  const workflow = context.getWorkflow();
  const nodeName = context.getNode().name;

  return {
    runName: `[${workflow.name}] ${nodeName}`,
    metadata: {
      execution_id: context.getExecutionId(),
      workflow,
      node: nodeName,
      ...(config.additionalMetadata ?? {}),
    },
    callbacks,
  };
}