mirror of https://github.com/n8n-io/n8n.git · synced 2025-03-05 20:50:17 -08:00

refactor(core): Move more code out of NodeExecutionFunctions, and add unit tests (no-changelog) (#13131)

parent ccdca6b39f · commit 4577ce0846
@@ -7,13 +7,13 @@ import {
	Logger,
	PollContext,
	TriggerContext,
	type IGetExecutePollFunctions,
	type IGetExecuteTriggerFunctions,
} from 'n8n-core';
import type {
	ExecutionError,
	IDeferredPromise,
	IExecuteResponsePromiseData,
	IGetExecutePollFunctions,
	IGetExecuteTriggerFunctions,
	INode,
	INodeExecutionData,
	IRun,
@@ -1,425 +1,23 @@
import { Container } from '@n8n/di';
import FormData from 'form-data';
import { mkdtempSync, readFileSync } from 'fs';
import { IncomingMessage } from 'http';
import type { Agent } from 'https';
import { mock } from 'jest-mock-extended';
import type {
	IBinaryData,
	IHttpRequestMethods,
	IHttpRequestOptions,
	INode,
	IRequestOptions,
	ITaskDataConnections,
	IWorkflowExecuteAdditionalData,
	Workflow,
	WorkflowHooks,
} from 'n8n-workflow';
import nock from 'nock';
import { tmpdir } from 'os';
import { join } from 'path';
import { Readable } from 'stream';
import type { SecureContextOptions } from 'tls';

import { BinaryDataService } from '@/binary-data/binary-data.service';
import { InstanceSettings } from '@/instance-settings';
import {
	binaryToString,
	copyInputItems,
	getBinaryDataBuffer,
	invokeAxios,
	isFilePathBlocked,
	parseContentDisposition,
	parseContentType,
	parseIncomingMessage,
	parseRequestObject,
	prepareBinaryData,
	proxyRequestToAxios,
	removeEmptyBody,
	setBinaryDataBuffer,
} from '@/node-execute-functions';

const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n'));

describe('NodeExecuteFunctions', () => {
	describe('test binary data helper methods', () => {
		test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'default' mode", async () => {
			// Setup a 'default' binary data manager instance
			Container.set(BinaryDataService, new BinaryDataService());

			await Container.get(BinaryDataService).init({
				mode: 'default',
				availableModes: ['default'],
				localStoragePath: temporaryDir,
			});

			// Set our binary data buffer
			const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
			const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
				{
					mimeType: 'txt',
					data: 'This should be overwritten by the actual payload in the response',
				},
				inputData,
				'workflowId',
				'executionId',
			);

			// Expect our return object to contain the base64 encoding of the input data, as it should be stored in memory.
			expect(setBinaryDataBufferResponse.data).toEqual(inputData.toString('base64'));

			// Now, re-fetch our data.
			// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
			const taskDataConnectionsInput: ITaskDataConnections = {
				main: [],
			};

			// We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
			taskDataConnectionsInput.main.push([
				{
					json: {},
					binary: {
						data: setBinaryDataBufferResponse,
					},
				},
			]);

			// Now, lets fetch our data! The item will be item index 0.
			const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
				taskDataConnectionsInput,
				0,
				'data',
				0,
			);

			expect(getBinaryDataBufferResponse).toEqual(inputData);
		});

		test('test prepareBinaryData parses filenames correctly', async () => {
			const filenameExpected = [
				{
					filename: 't?ext',
					expected: 't?ext',
				},
				{
					filename: 'no-symbol',
					expected: 'no-symbol',
				},
			];

			for (const { filename, expected } of filenameExpected) {
				const binaryData: Buffer = Buffer.from('This is some binary data', 'utf8');

				const result = await prepareBinaryData(binaryData, 'workflowId', 'executionId', filename);

				expect(result.fileName).toEqual(expected);
			}
		});

		test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'filesystem' mode", async () => {
			Container.set(BinaryDataService, new BinaryDataService());

			// Setup a 'filesystem' binary data manager instance
			await Container.get(BinaryDataService).init({
				mode: 'filesystem',
				availableModes: ['filesystem'],
				localStoragePath: temporaryDir,
			});

			// Set our binary data buffer
			const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
			const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
				{
					mimeType: 'txt',
					data: 'This should be overwritten with the name of the configured data manager',
				},
				inputData,
				'workflowId',
				'executionId',
			);

			// Expect our return object to contain the name of the configured data manager.
			expect(setBinaryDataBufferResponse.data).toEqual('filesystem-v2');

			// Ensure that the input data was successfully persisted to disk.
			expect(
				readFileSync(
					`${temporaryDir}/${setBinaryDataBufferResponse.id?.replace('filesystem-v2:', '')}`,
				),
			).toEqual(inputData);

			// Now, re-fetch our data.
			// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
			const taskDataConnectionsInput: ITaskDataConnections = {
				main: [],
			};

			// We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
			taskDataConnectionsInput.main.push([
				{
					json: {},
					binary: {
						data: setBinaryDataBufferResponse,
					},
				},
			]);

			// Now, lets fetch our data! The item will be item index 0.
			const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
				taskDataConnectionsInput,
				0,
				'data',
				0,
			);

			expect(getBinaryDataBufferResponse).toEqual(inputData);
		});
	});

	describe('parseContentType', () => {
		const testCases = [
			{
				input: 'text/plain',
				expected: {
					type: 'text/plain',
					parameters: {
						charset: 'utf-8',
					},
				},
				description: 'should parse basic content type',
			},
			{
				input: 'TEXT/PLAIN',
				expected: {
					type: 'text/plain',
					parameters: {
						charset: 'utf-8',
					},
				},
				description: 'should convert type to lowercase',
			},
			{
				input: 'text/html; charset=iso-8859-1',
				expected: {
					type: 'text/html',
					parameters: {
						charset: 'iso-8859-1',
					},
				},
				description: 'should parse content type with charset',
			},
			{
				input: 'application/json; charset=utf-8; boundary=---123',
				expected: {
					type: 'application/json',
					parameters: {
						charset: 'utf-8',
						boundary: '---123',
					},
				},
				description: 'should parse content type with multiple parameters',
			},
			{
				input: 'text/plain; charset="utf-8"; filename="test.txt"',
				expected: {
					type: 'text/plain',
					parameters: {
						charset: 'utf-8',
						filename: 'test.txt',
					},
				},
				description: 'should handle quoted parameter values',
			},
			{
				input: 'text/plain; filename=%22test%20file.txt%22',
				expected: {
					type: 'text/plain',
					parameters: {
						charset: 'utf-8',
						filename: 'test file.txt',
					},
				},
				description: 'should handle encoded parameter values',
			},
			{
				input: undefined,
				expected: null,
				description: 'should return null for undefined input',
			},
			{
				input: '',
				expected: null,
				description: 'should return null for empty string',
			},
		];

		test.each(testCases)('$description', ({ input, expected }) => {
			expect(parseContentType(input)).toEqual(expected);
		});
	});

	describe('parseContentDisposition', () => {
		const testCases = [
			{
				input: 'attachment; filename="file.txt"',
				expected: { type: 'attachment', filename: 'file.txt' },
				description: 'should parse basic content disposition',
			},
			{
				input: 'attachment; filename=file.txt',
				expected: { type: 'attachment', filename: 'file.txt' },
				description: 'should parse filename without quotes',
			},
			{
				input: 'inline; filename="image.jpg"',
				expected: { type: 'inline', filename: 'image.jpg' },
				description: 'should parse inline disposition',
			},
			{
				input: 'attachment; filename="my file.pdf"',
				expected: { type: 'attachment', filename: 'my file.pdf' },
				description: 'should parse filename with spaces',
			},
			{
				input: "attachment; filename*=UTF-8''my%20file.txt",
				expected: { type: 'attachment', filename: 'my file.txt' },
				description: 'should parse filename* parameter (RFC 5987)',
			},
			{
				input: 'filename="test.txt"',
				expected: { type: 'attachment', filename: 'test.txt' },
				description: 'should handle invalid syntax but with filename',
			},
			{
				input: 'filename=test.txt',
				expected: { type: 'attachment', filename: 'test.txt' },
				description: 'should handle invalid syntax with only filename parameter',
			},
			{
				input: undefined,
				expected: null,
				description: 'should return null for undefined input',
			},
			{
				input: '',
				expected: null,
				description: 'should return null for empty string',
			},
			{
				input: 'attachment; filename="%F0%9F%98%80.txt"',
				expected: { type: 'attachment', filename: '😀.txt' },
				description: 'should handle encoded filenames',
			},
			{
				input: 'attachment; size=123; filename="test.txt"; creation-date="Thu, 1 Jan 2020"',
				expected: { type: 'attachment', filename: 'test.txt' },
				description: 'should handle multiple parameters',
			},
		];

		test.each(testCases)('$description', ({ input, expected }) => {
			expect(parseContentDisposition(input)).toEqual(expected);
		});
	});

	describe('parseIncomingMessage', () => {
		it('parses valid content-type header', () => {
			const message = mock<IncomingMessage>({
				headers: { 'content-type': 'application/json', 'content-disposition': undefined },
			});
			parseIncomingMessage(message);

			expect(message.contentType).toEqual('application/json');
		});

		it('parses valid content-type header with parameters', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': 'application/json; charset=utf-8',
					'content-disposition': undefined,
				},
			});
			parseIncomingMessage(message);

			expect(message.contentType).toEqual('application/json');
			expect(message.encoding).toEqual('utf-8');
		});

		it('parses valid content-type header with encoding wrapped in quotes', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': 'application/json; charset="utf-8"',
					'content-disposition': undefined,
				},
			});
			parseIncomingMessage(message);

			expect(message.contentType).toEqual('application/json');
			expect(message.encoding).toEqual('utf-8');
		});

		it('parses valid content-disposition header with filename*', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': undefined,
					'content-disposition':
						'attachment; filename="screenshot%20(1).png"; filename*=UTF-8\'\'screenshot%20(1).png',
				},
			});
			parseIncomingMessage(message);

			expect(message.contentDisposition).toEqual({
				filename: 'screenshot (1).png',
				type: 'attachment',
			});
		});

		it('parses valid content-disposition header with filename* (quoted)', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': undefined,
					'content-disposition': ' attachment;filename*="utf-8\' \'test-unsplash.jpg"',
				},
			});
			parseIncomingMessage(message);

			expect(message.contentDisposition).toEqual({
				filename: 'test-unsplash.jpg',
				type: 'attachment',
			});
		});

		it('parses valid content-disposition header with filename and trailing ";"', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': undefined,
					'content-disposition': 'inline; filename="screenshot%20(1).png";',
				},
			});
			parseIncomingMessage(message);

			expect(message.contentDisposition).toEqual({
				filename: 'screenshot (1).png',
				type: 'inline',
			});
		});

		it('parses non standard content-disposition with missing type', () => {
			const message = mock<IncomingMessage>({
				headers: {
					'content-type': undefined,
					'content-disposition': 'filename="screenshot%20(1).png";',
				},
			});
			parseIncomingMessage(message);

			expect(message.contentDisposition).toEqual({
				filename: 'screenshot (1).png',
				type: 'attachment',
			});
		});
	});

	describe('proxyRequestToAxios', () => {
		const baseUrl = 'http://example.de';
		const workflow = mock<Workflow>();
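The comment repeated in the tests above ("An ITaskDataConnections object is used to share data between nodes") is easier to follow with the shape written out. A simplified sketch of the structures those tests construct, condensed from the n8n-workflow typings (optional fields omitted, so treat the details as an approximation):

// Simplified sketch; the canonical definitions live in n8n-workflow.
interface SketchedNodeExecutionData {
	json: object; // the item's JSON payload
	binary?: { [propertyName: string]: IBinaryData }; // e.g. { data: setBinaryDataBufferResponse }
}

interface SketchedTaskDataConnections {
	// 'main' holds one entry per input; each entry is the list of items on that input (or null).
	[connectionType: string]: Array<SketchedNodeExecutionData[] | null>;
}

This is why getBinaryDataBuffer takes four arguments in the tests: the connections object, the input index, the binary property name, and the item index.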
@@ -566,132 +164,6 @@ describe('NodeExecuteFunctions', () => {
		});
	});

	describe('parseRequestObject', () => {
		test('should handle basic request options', async () => {
			const axiosOptions = await parseRequestObject({
				url: 'https://example.com',
				method: 'POST',
				headers: { 'content-type': 'application/json' },
				body: { key: 'value' },
			});

			expect(axiosOptions).toEqual(
				expect.objectContaining({
					url: 'https://example.com',
					method: 'POST',
					headers: { accept: '*/*', 'content-type': 'application/json' },
					data: { key: 'value' },
					maxRedirects: 0,
				}),
			);
		});

		test('should set correct headers for FormData', async () => {
			const formData = new FormData();
			formData.append('key', 'value');

			const axiosOptions = await parseRequestObject({
				url: 'https://example.com',
				formData,
				headers: {
					'content-type': 'multipart/form-data',
				},
			});

			expect(axiosOptions.headers).toMatchObject({
				accept: '*/*',
				'content-length': 163,
				'content-type': expect.stringMatching(/^multipart\/form-data; boundary=/),
			});

			expect(axiosOptions.data).toBeInstanceOf(FormData);
		});

		test('should not use Host header for SNI', async () => {
			const axiosOptions = await parseRequestObject({
				url: 'https://example.de/foo/bar',
				headers: { Host: 'other.host.com' },
			});
			expect((axiosOptions.httpsAgent as Agent).options.servername).toEqual('example.de');
		});

		describe('should set SSL certificates', () => {
			const agentOptions: SecureContextOptions = {
				ca: '-----BEGIN CERTIFICATE-----\nTEST\n-----END CERTIFICATE-----',
			};
			const requestObject: IRequestOptions = {
				method: 'GET',
				uri: 'https://example.de',
				agentOptions,
			};

			test('on regular requests', async () => {
				const axiosOptions = await parseRequestObject(requestObject);
				expect((axiosOptions.httpsAgent as Agent).options).toEqual({
					servername: 'example.de',
					...agentOptions,
					noDelay: true,
					path: null,
				});
			});

			test('on redirected requests', async () => {
				const axiosOptions = await parseRequestObject(requestObject);
				expect(axiosOptions.beforeRedirect).toBeDefined;
				// eslint-disable-next-line @typescript-eslint/no-explicit-any
				const redirectOptions: Record<string, any> = { agents: {}, hostname: 'example.de' };
				axiosOptions.beforeRedirect!(redirectOptions, mock());
				expect(redirectOptions.agent).toEqual(redirectOptions.agents.https);
				expect((redirectOptions.agent as Agent).options).toEqual({
					servername: 'example.de',
					...agentOptions,
					noDelay: true,
					path: null,
				});
			});
		});

		describe('when followRedirect is true', () => {
			test.each(['GET', 'HEAD'] as IHttpRequestMethods[])(
				'should set maxRedirects on %s ',
				async (method) => {
					const axiosOptions = await parseRequestObject({
						method,
						followRedirect: true,
						maxRedirects: 1234,
					});
					expect(axiosOptions.maxRedirects).toEqual(1234);
				},
			);

			test.each(['POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
				'should not set maxRedirects on %s ',
				async (method) => {
					const axiosOptions = await parseRequestObject({
						method,
						followRedirect: true,
						maxRedirects: 1234,
					});
					expect(axiosOptions.maxRedirects).toEqual(0);
				},
			);
		});

		describe('when followAllRedirects is true', () => {
			test.each(['GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
				'should set maxRedirects on %s ',
				async (method) => {
					const axiosOptions = await parseRequestObject({
						method,
						followAllRedirects: true,
						maxRedirects: 1234,
					});
					expect(axiosOptions.maxRedirects).toEqual(1234);
				},
			);
		});
	});

	describe('invokeAxios', () => {
		const baseUrl = 'http://example.de';

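One detail worth calling out from the parseRequestObject tests above (removed here and relocated as part of this refactor): the TLS server name used for SNI must come from the request URL's hostname, never from a caller-supplied Host header, otherwise certificate validation targets the wrong name. The idea in isolation, using Node's standard https.Agent option (illustrative only, not code from this commit):

import { Agent } from 'https';

// SNI advertises the host we actually dial, taken from the URL,
// even when the request overrides the HTTP Host header.
const url = new URL('https://example.de/foo/bar');
const httpsAgent = new Agent({ servername: url.hostname }); // 'example.de'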
@@ -848,107 +320,4 @@ describe('NodeExecuteFunctions', () => {
			},
		);
	});

	describe('binaryToString', () => {
		const ENCODING_SAMPLES = {
			utf8: {
				text: 'Hello, 世界! τεστ мир ⚡️ é à ü ñ',
				buffer: Buffer.from([
					0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0xe4, 0xb8, 0x96, 0xe7, 0x95, 0x8c, 0x21, 0x20,
					0xcf, 0x84, 0xce, 0xb5, 0xcf, 0x83, 0xcf, 0x84, 0x20, 0xd0, 0xbc, 0xd0, 0xb8, 0xd1, 0x80,
					0x20, 0xe2, 0x9a, 0xa1, 0xef, 0xb8, 0x8f, 0x20, 0xc3, 0xa9, 0x20, 0xc3, 0xa0, 0x20, 0xc3,
					0xbc, 0x20, 0xc3, 0xb1,
				]),
			},

			'iso-8859-15': {
				text: 'Café € personnalité',
				buffer: Buffer.from([
					0x43, 0x61, 0x66, 0xe9, 0x20, 0xa4, 0x20, 0x70, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x6e, 0x61,
					0x6c, 0x69, 0x74, 0xe9,
				]),
			},

			latin1: {
				text: 'señor année déjà',
				buffer: Buffer.from([
					0x73, 0x65, 0xf1, 0x6f, 0x72, 0x20, 0x61, 0x6e, 0x6e, 0xe9, 0x65, 0x20, 0x64, 0xe9, 0x6a,
					0xe0,
				]),
			},

			ascii: {
				text: 'Hello, World! 123',
				buffer: Buffer.from([
					0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, 0x20, 0x31,
					0x32, 0x33,
				]),
			},

			'windows-1252': {
				text: '€ Smart "quotes" • bullet',
				buffer: Buffer.from([
					0x80, 0x20, 0x53, 0x6d, 0x61, 0x72, 0x74, 0x20, 0x22, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x73,
					0x22, 0x20, 0x95, 0x20, 0x62, 0x75, 0x6c, 0x6c, 0x65, 0x74,
				]),
			},

			'shift-jis': {
				text: 'こんにちは世界',
				buffer: Buffer.from([
					0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd, 0x90, 0xa2, 0x8a, 0x45,
				]),
			},

			big5: {
				text: '哈囉世界',
				buffer: Buffer.from([0xab, 0xa2, 0xc5, 0x6f, 0xa5, 0x40, 0xac, 0xc9]),
			},

			'koi8-r': {
				text: 'Привет мир',
				buffer: Buffer.from([0xf0, 0xd2, 0xc9, 0xd7, 0xc5, 0xd4, 0x20, 0xcd, 0xc9, 0xd2]),
			},
		};

		describe('should handle Buffer', () => {
			for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
				test(`with ${encoding}`, async () => {
					const data = await binaryToString(buffer, encoding);
					expect(data).toBe(text);
				});
			}
		});

		describe('should handle streams', () => {
			for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
				test(`with ${encoding}`, async () => {
					const stream = Readable.from(buffer);
					const data = await binaryToString(stream, encoding);
					expect(data).toBe(text);
				});
			}
		});

		describe('should handle IncomingMessage', () => {
			for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
				test(`with ${encoding}`, async () => {
					const response = Readable.from(buffer) as IncomingMessage;
					response.headers = { 'content-type': `application/json;charset=${encoding}` };
					// @ts-expect-error need this hack to fake `instanceof IncomingMessage` checks
					response.__proto__ = IncomingMessage.prototype;
					const data = await binaryToString(response);
					expect(data).toBe(text);
				});
			}
		});
	});
});

describe('isFilePathBlocked', () => {
	test('should return true for static cache dir', () => {
		const filePath = Container.get(InstanceSettings).staticCacheDir;

		expect(isFilePathBlocked(filePath)).toBe(true);
	});
});
@@ -1,6 +1,5 @@
import { mock } from 'jest-mock-extended';
import type {
	IGetExecuteTriggerFunctions,
	INode,
	ITriggerResponse,
	IWorkflowExecuteAdditionalData,

@@ -15,6 +14,7 @@ import { LoggerProxy, TriggerCloseError, WorkflowActivationError } from 'n8n-wor
import type { ErrorReporter } from '@/errors/error-reporter';

import { ActiveWorkflows } from '../active-workflows';
import type { IGetExecuteTriggerFunctions } from '../interfaces';
import type { PollContext } from '../node-execution-context';
import type { ScheduledTaskManager } from '../scheduled-task-manager';
import type { TriggersAndPollers } from '../triggers-and-pollers';
@@ -1,7 +1,5 @@
import { Service } from '@n8n/di';
import type {
	IGetExecutePollFunctions,
	IGetExecuteTriggerFunctions,
	INode,
	ITriggerResponse,
	IWorkflowExecuteAdditionalData,

@@ -22,6 +20,7 @@ import { ErrorReporter } from '@/errors/error-reporter';
import type { IWorkflowData } from '@/interfaces';
import { Logger } from '@/logging/logger';

import type { IGetExecutePollFunctions, IGetExecuteTriggerFunctions } from './interfaces';
import { ScheduledTaskManager } from './scheduled-task-manager';
import { TriggersAndPollers } from './triggers-and-pollers';

@@ -1,4 +1,5 @@
export * from './active-workflows';
export * from './interfaces';
export * from './routing-node';
export * from './node-execution-context';
export * from './partial-execution-utils';
packages/core/src/execution-engine/interfaces.ts (new file, 29 lines)

@@ -0,0 +1,29 @@
import type {
	INode,
	IPollFunctions,
	ITriggerFunctions,
	IWorkflowExecuteAdditionalData,
	Workflow,
	WorkflowActivateMode,
	WorkflowExecuteMode,
} from 'n8n-workflow';

export interface IGetExecutePollFunctions {
	(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): IPollFunctions;
}

export interface IGetExecuteTriggerFunctions {
	(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): ITriggerFunctions;
}
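Both interfaces in the new file above describe factory functions: given a workflow, a node, and execution metadata, they produce the `this` context a poller or trigger runs with. A minimal sketch of a conforming value; the signature comes from the file above, while the body (and the TriggerContext constructor arguments) is an assumption for illustration:

import { TriggerContext, type IGetExecuteTriggerFunctions } from 'n8n-core';

// Hypothetical factory; constructor arguments are assumed here, and a real
// implementation would also wire in hooks and emitters.
const getTriggerFunctions: IGetExecuteTriggerFunctions = (
	workflow,
	node,
	additionalData,
	mode,
	activation,
) => new TriggerContext(workflow, node, additionalData, mode, activation);

Moving these types into n8n-core's execution engine is what lets the n8n-workflow imports be dropped in the hunks above and below.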
@@ -28,18 +28,20 @@ import {
	copyInputItems,
	normalizeItems,
	constructExecutionMetaData,
	assertBinaryData,
	getBinaryDataBuffer,
	copyBinaryFile,
	getRequestHelperFunctions,
	getBinaryHelperFunctions,
	getSSHTunnelFunctions,
	getFileSystemHelperFunctions,
	getCheckProcessedHelperFunctions,
	detectBinaryEncoding,
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
import {
	assertBinaryData,
	getBinaryDataBuffer,
	copyBinaryFile,
	getBinaryHelperFunctions,
	detectBinaryEncoding,
} from './utils/binary-helper-functions';
import { getDeduplicationHelperFunctions } from './utils/deduplication-helper-functions';
import { getFileSystemHelperFunctions } from './utils/file-system-helper-functions';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions {

@@ -91,7 +93,7 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getSSHTunnelFunctions(),
			...getFileSystemHelperFunctions(node),
			...getCheckProcessedHelperFunctions(workflow, node),
			...getDeduplicationHelperFunctions(workflow, node),

			assertBinaryData: (itemIndex, propertyName) =>
				assertBinaryData(inputData, node, itemIndex, propertyName, 0),
@@ -14,16 +14,15 @@ import type {
import { ApplicationError, createDeferredPromise, NodeConnectionType } from 'n8n-workflow';

// eslint-disable-next-line import/no-cycle
import { getRequestHelperFunctions, returnJsonArray } from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
import {
	assertBinaryData,
	detectBinaryEncoding,
	getBinaryDataBuffer,
	getBinaryHelperFunctions,
	getRequestHelperFunctions,
	returnJsonArray,
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
} from './utils/binary-helper-functions';

export class ExecuteSingleContext extends BaseExecuteContext implements IExecuteSingleFunctions {
	readonly helpers: IExecuteSingleFunctions['helpers'];
@@ -13,3 +13,6 @@ export { TriggerContext } from './trigger-context';
export { WebhookContext } from './webhook-context';

export { getAdditionalKeys } from './utils/get-additional-keys';
export { parseIncomingMessage } from './utils/parse-incoming-message';
export { parseRequestObject } from './utils/parse-request-object';
export * from './utils/binary-helper-functions';
@@ -11,13 +11,13 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';

// eslint-disable-next-line import/no-cycle
import {
	getBinaryHelperFunctions,
	getRequestHelperFunctions,
	getSchedulingFunctions,
	returnJsonArray,
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';
import { getBinaryHelperFunctions } from './utils/binary-helper-functions';

const throwOnEmit = () => {
	throw new ApplicationError('Overwrite PollContext.__emit function');
@@ -21,14 +21,8 @@ import { createDeferredPromise } from 'n8n-workflow';

// eslint-disable-next-line import/no-cycle
import {
	assertBinaryData,
	constructExecutionMetaData,
	copyInputItems,
	detectBinaryEncoding,
	getBinaryDataBuffer,
	getBinaryHelperFunctions,
	getCheckProcessedHelperFunctions,
	getFileSystemHelperFunctions,
	getRequestHelperFunctions,
	getSSHTunnelFunctions,
	normalizeItems,

@@ -36,6 +30,14 @@ import {
} from '@/node-execute-functions';

import { BaseExecuteContext } from './base-execute-context';
import {
	assertBinaryData,
	detectBinaryEncoding,
	getBinaryDataBuffer,
	getBinaryHelperFunctions,
} from './utils/binary-helper-functions';
import { getDeduplicationHelperFunctions } from './utils/deduplication-helper-functions';
import { getFileSystemHelperFunctions } from './utils/file-system-helper-functions';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class SupplyDataContext extends BaseExecuteContext implements ISupplyDataFunctions {
@@ -83,7 +85,7 @@ export class SupplyDataContext extends BaseExecuteContext implements ISupplyData
			...getSSHTunnelFunctions(),
			...getFileSystemHelperFunctions(node),
			...getBinaryHelperFunctions(additionalData, workflow.id),
			...getCheckProcessedHelperFunctions(workflow, node),
			...getDeduplicationHelperFunctions(workflow, node),
			assertBinaryData: (itemIndex, propertyName) =>
				assertBinaryData(inputData, node, itemIndex, propertyName, 0),
			getBinaryDataBuffer: async (itemIndex, propertyName) =>
@@ -11,7 +11,6 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';

// eslint-disable-next-line import/no-cycle
import {
	getBinaryHelperFunctions,
	getRequestHelperFunctions,
	getSchedulingFunctions,
	getSSHTunnelFunctions,

@@ -19,6 +18,7 @@ import {
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';
import { getBinaryHelperFunctions } from './utils/binary-helper-functions';

const throwOnEmit = () => {
	throw new ApplicationError('Overwrite TriggerContext.emit function');
@@ -0,0 +1,480 @@
import { Container } from '@n8n/di';
import { mkdtempSync, readFileSync } from 'fs';
import { IncomingMessage } from 'http';
import { mock } from 'jest-mock-extended';
import type {
	IBinaryData,
	INode,
	ITaskDataConnections,
	IWorkflowExecuteAdditionalData,
} from 'n8n-workflow';
import { tmpdir } from 'os';
import { join } from 'path';
import { Readable } from 'stream';

import { BinaryDataService } from '@/binary-data/binary-data.service';

import {
	assertBinaryData,
	binaryToString,
	copyBinaryFile,
	detectBinaryEncoding,
	getBinaryDataBuffer,
	getBinaryHelperFunctions,
	prepareBinaryData,
	setBinaryDataBuffer,
} from '../binary-helper-functions';

const workflowId = 'workflow123';
const executionId = 'execution456';

const bufferToIncomingMessage = (buffer: Buffer, encoding = 'utf-8') => {
	const incomingMessage = Readable.from(buffer) as IncomingMessage;
	incomingMessage.headers = { 'content-type': `application/json;charset=${encoding}` };
	// @ts-expect-error need this hack to fake `instanceof IncomingMessage` checks
	incomingMessage.__proto__ = IncomingMessage.prototype;
	return incomingMessage;
};

describe('test binary data helper methods', () => {
	let binaryDataService: BinaryDataService;
	const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n'));

	beforeEach(() => {
		binaryDataService = new BinaryDataService();
		Container.set(BinaryDataService, binaryDataService);
	});

	test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'default' mode", async () => {
		// Setup a 'default' binary data manager instance
		await binaryDataService.init({
			mode: 'default',
			availableModes: ['default'],
			localStoragePath: temporaryDir,
		});

		// Set our binary data buffer
		const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
		const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
			{
				mimeType: 'txt',
				data: 'This should be overwritten by the actual payload in the response',
			},
			inputData,
			'workflowId',
			'executionId',
		);

		// Expect our return object to contain the base64 encoding of the input data, as it should be stored in memory.
		expect(setBinaryDataBufferResponse.data).toEqual(inputData.toString('base64'));

		// Now, re-fetch our data.
		// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
		const taskDataConnectionsInput: ITaskDataConnections = {
			main: [],
		};

		// We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
		taskDataConnectionsInput.main.push([
			{
				json: {},
				binary: {
					data: setBinaryDataBufferResponse,
				},
			},
		]);

		// Now, lets fetch our data! The item will be item index 0.
		const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
			taskDataConnectionsInput,
			0,
			'data',
			0,
		);

		expect(getBinaryDataBufferResponse).toEqual(inputData);
	});

	test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'filesystem' mode", async () => {
		// Setup a 'filesystem' binary data manager instance
		await binaryDataService.init({
			mode: 'filesystem',
			availableModes: ['filesystem'],
			localStoragePath: temporaryDir,
		});

		// Set our binary data buffer
		const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
		const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
			{
				mimeType: 'txt',
				data: 'This should be overwritten with the name of the configured data manager',
			},
			inputData,
			'workflowId',
			'executionId',
		);

		// Expect our return object to contain the name of the configured data manager.
		expect(setBinaryDataBufferResponse.data).toEqual('filesystem-v2');

		// Ensure that the input data was successfully persisted to disk.
		expect(
			readFileSync(
				`${temporaryDir}/${setBinaryDataBufferResponse.id?.replace('filesystem-v2:', '')}`,
			),
		).toEqual(inputData);

		// Now, re-fetch our data.
		// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
		const taskDataConnectionsInput: ITaskDataConnections = {
			main: [],
		};

		// We add an input set, with one item at index 0, to this input. It contains an empty json payload and our binary data.
		taskDataConnectionsInput.main.push([
			{
				json: {},
				binary: {
					data: setBinaryDataBufferResponse,
				},
			},
		]);

		// Now, lets fetch our data! The item will be item index 0.
		const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
			taskDataConnectionsInput,
			0,
			'data',
			0,
		);

		expect(getBinaryDataBufferResponse).toEqual(inputData);
	});
});

describe('binaryToString', () => {
	const ENCODING_SAMPLES = {
		utf8: {
			text: 'Hello, 世界! τεστ мир ⚡️ é à ü ñ',
			buffer: Buffer.from([
				0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0xe4, 0xb8, 0x96, 0xe7, 0x95, 0x8c, 0x21, 0x20,
				0xcf, 0x84, 0xce, 0xb5, 0xcf, 0x83, 0xcf, 0x84, 0x20, 0xd0, 0xbc, 0xd0, 0xb8, 0xd1, 0x80,
				0x20, 0xe2, 0x9a, 0xa1, 0xef, 0xb8, 0x8f, 0x20, 0xc3, 0xa9, 0x20, 0xc3, 0xa0, 0x20, 0xc3,
				0xbc, 0x20, 0xc3, 0xb1,
			]),
		},

		'iso-8859-15': {
			text: 'Café € personnalité',
			buffer: Buffer.from([
				0x43, 0x61, 0x66, 0xe9, 0x20, 0xa4, 0x20, 0x70, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x6e, 0x61,
				0x6c, 0x69, 0x74, 0xe9,
			]),
		},

		latin1: {
			text: 'señor année déjà',
			buffer: Buffer.from([
				0x73, 0x65, 0xf1, 0x6f, 0x72, 0x20, 0x61, 0x6e, 0x6e, 0xe9, 0x65, 0x20, 0x64, 0xe9, 0x6a,
				0xe0,
			]),
		},

		ascii: {
			text: 'Hello, World! 123',
			buffer: Buffer.from([
				0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21, 0x20, 0x31,
				0x32, 0x33,
			]),
		},

		'windows-1252': {
			text: '€ Smart "quotes" • bullet',
			buffer: Buffer.from([
				0x80, 0x20, 0x53, 0x6d, 0x61, 0x72, 0x74, 0x20, 0x22, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x73,
				0x22, 0x20, 0x95, 0x20, 0x62, 0x75, 0x6c, 0x6c, 0x65, 0x74,
			]),
		},

		'shift-jis': {
			text: 'こんにちは世界',
			buffer: Buffer.from([
				0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd, 0x90, 0xa2, 0x8a, 0x45,
			]),
		},

		big5: {
			text: '哈囉世界',
			buffer: Buffer.from([0xab, 0xa2, 0xc5, 0x6f, 0xa5, 0x40, 0xac, 0xc9]),
		},

		'koi8-r': {
			text: 'Привет мир',
			buffer: Buffer.from([0xf0, 0xd2, 0xc9, 0xd7, 0xc5, 0xd4, 0x20, 0xcd, 0xc9, 0xd2]),
		},
	};

	describe('should handle Buffer', () => {
		for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
			test(`with ${encoding}`, async () => {
				const data = await binaryToString(buffer, encoding);
				expect(data).toBe(text);
			});
		}
	});

	describe('should handle streams', () => {
		for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
			test(`with ${encoding}`, async () => {
				const stream = Readable.from(buffer);
				const data = await binaryToString(stream, encoding);
				expect(data).toBe(text);
			});
		}
	});

	describe('should handle IncomingMessage', () => {
		for (const [encoding, { text, buffer }] of Object.entries(ENCODING_SAMPLES)) {
			test(`with ${encoding}`, async () => {
				const incomingMessage = bufferToIncomingMessage(buffer, encoding);
				const data = await binaryToString(incomingMessage);
				expect(data).toBe(text);
			});
		}
	});

	it('should handle undefined encoding', async () => {
		const buffer = Buffer.from('Test');
		const result = await binaryToString(buffer);
		expect(result).toBe('Test');
	});

	it('should handle stream with no explicit encoding', async () => {
		const stream = Readable.from(Buffer.from('Test'));
		const result = await binaryToString(stream);
		expect(result).toBe('Test');
	});
});

describe('detectBinaryEncoding', () => {
	it('should detect encoding for utf-8 buffers', () => {
		const utf8Buffer = Buffer.from('Hello, 世界');
		expect(detectBinaryEncoding(utf8Buffer)).toBe('UTF-8');
	});

	it('should detect encoding for latin1 buffers', () => {
		const latinBuffer = Buffer.from('señor', 'latin1');
		expect(detectBinaryEncoding(latinBuffer)).toBe('ISO-8859-1');
	});

	it('should handle empty buffer', () => {
		const emptyBuffer = Buffer.from('');
		expect(detectBinaryEncoding(emptyBuffer)).toBeDefined();
	});
});

describe('assertBinaryData', () => {
	const mockNode = mock<INode>({ name: 'Test Node' });

	it('should throw error when no binary data exists', () => {
		const inputData = { main: [[{ json: {} }]] };

		expect(() => assertBinaryData(inputData, mockNode, 0, 'testFile', 0)).toThrow(
			"expects the node's input data to contain a binary file",
		);
	});

	it('should throw error when specific binary property does not exist', () => {
		const inputData = {
			main: [
				[
					{
						json: {},
						binary: {
							otherFile: mock<IBinaryData>(),
						},
					},
				],
			],
		};

		expect(() => assertBinaryData(inputData, mockNode, 0, 'testFile', 0)).toThrow(
			'The item has no binary field',
		);
	});

	it('should return binary data when it exists', () => {
		const binaryData = mock<IBinaryData>({ fileName: 'test.txt' });
		const inputData = {
			main: [
				[
					{
						json: {},
						binary: {
							testFile: binaryData,
						},
					},
				],
			],
		};

		const result = assertBinaryData(inputData, mockNode, 0, 'testFile', 0);
		expect(result).toBe(binaryData);
	});
});

describe('copyBinaryFile', () => {
	const fileName = 'test.txt';
	const filePath = `/path/to/${fileName}`;
	const binaryData: IBinaryData = {
		data: '',
		mimeType: 'text/plain',
		fileName,
	};

	const binaryDataService = mock<BinaryDataService>();

	beforeEach(() => {
		jest.resetAllMocks();
		Container.set(BinaryDataService, binaryDataService);
		binaryDataService.copyBinaryFile.mockResolvedValueOnce(binaryData);
	});

	it('should handle files without explicit mime type', async () => {
		const result = await copyBinaryFile(workflowId, executionId, filePath, fileName);

		expect(result.fileName).toBe(fileName);
		expect(binaryDataService.copyBinaryFile).toHaveBeenCalledWith(
			workflowId,
			executionId,
			{
				...binaryData,
				fileExtension: 'txt',
				fileType: 'text',
			},
			filePath,
		);
	});

	it('should use provided mime type', async () => {
		const result = await copyBinaryFile(
			workflowId,
			executionId,
			filePath,
			fileName,
			'application/octet-stream',
		);

		expect(result.fileName).toBe(fileName);
		expect(binaryDataService.copyBinaryFile).toHaveBeenCalledWith(
			workflowId,
			executionId,
			{
				...binaryData,
				fileExtension: 'bin',
				fileType: undefined,
				mimeType: 'application/octet-stream',
			},
			filePath,
		);
	});
});

describe('prepareBinaryData', () => {
	const buffer: Buffer = Buffer.from('test', 'utf8');
	const binaryDataService = mock<BinaryDataService>();

	beforeEach(() => {
		jest.resetAllMocks();
		Container.set(BinaryDataService, binaryDataService);

		binaryDataService.store.mockImplementation(async (_w, _e, _b, binaryData) => binaryData);
	});

	it('parses filenames correctly', async () => {
		const fileName = 'test-file';

		const result = await prepareBinaryData(buffer, executionId, workflowId, fileName);

		expect(result.fileName).toEqual(fileName);
		expect(binaryDataService.store).toHaveBeenCalledWith(workflowId, executionId, buffer, {
			data: '',
			fileExtension: undefined,
			fileName,
			fileType: 'text',
			mimeType: 'text/plain',
		});
	});

	it('handles IncomingMessage with responseUrl', async () => {
		const incomingMessage = bufferToIncomingMessage(buffer);
		incomingMessage.responseUrl = 'http://example.com/file.txt';

		const result = await prepareBinaryData(incomingMessage, executionId, workflowId);

		expect(result.fileName).toBe('file.txt');
		expect(result.mimeType).toBe('text/plain');
	});

	it('handles buffer with no detectable mime type', async () => {
		const buffer = Buffer.from([0x00, 0x01, 0x02, 0x03]);

		const result = await prepareBinaryData(buffer, executionId, workflowId);

		expect(result.mimeType).toBe('text/plain');
	});

	it('handles IncomingMessage with no content type or filename', async () => {
		const incomingMessage = bufferToIncomingMessage(Buffer.from('test'));
		delete incomingMessage.headers['content-type'];
		delete incomingMessage.contentDisposition;

		const result = await prepareBinaryData(incomingMessage, executionId, workflowId);

		expect(result.mimeType).toBe('text/plain');
	});
});

describe('setBinaryDataBuffer', () => {
	it('should handle empty buffer', async () => {
		const emptyBuffer = Buffer.from('');
		const binaryData: IBinaryData = {
			mimeType: 'text/plain',
			data: '',
		};

		const result = await setBinaryDataBuffer(binaryData, emptyBuffer, workflowId, executionId);

		expect(result).toBeDefined();
		expect(result.data).toBe('');
	});
});

describe('getBinaryHelperFunctions', () => {
	it('should return helper functions with correct context', async () => {
		const additionalData = { executionId } as IWorkflowExecuteAdditionalData;

		const helperFunctions = getBinaryHelperFunctions(additionalData, workflowId);

		const expectedMethods = [
			'getBinaryPath',
			'getBinaryStream',
			'getBinaryMetadata',
			'binaryToBuffer',
			'binaryToString',
			'prepareBinaryData',
			'setBinaryDataBuffer',
			'copyBinaryFile',
		] as const;

		expectedMethods.forEach((method) => {
			expect(helperFunctions).toHaveProperty(method);
			expect(typeof helperFunctions[method]).toBe('function');
		});

		await expect(async () => await helperFunctions.copyBinaryFile()).rejects.toThrow(
			'`copyBinaryFile` has been removed',
		);
	});
});
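The helpers exercised by the new test file above are roughly what node implementations reach at runtime through `this.helpers`. A sketch of typical node-side usage, following n8n's public node-development API (the node logic and file name are illustrative, not from this commit):

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

// Illustrative only: a node turning a raw buffer into n8n binary data.
async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const buffer = Buffer.from('hello', 'utf8');
	// prepareBinaryData stores the buffer via the configured BinaryDataService
	// and returns the IBinaryData descriptor to attach to the output item.
	const binary = await this.helpers.prepareBinaryData(buffer, 'hello.txt');
	return [[{ json: {}, binary: { data: binary } }]];
}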
@@ -0,0 +1,25 @@
import { mock } from 'jest-mock-extended';
import type { Workflow, INode } from 'n8n-workflow';

import { getDeduplicationHelperFunctions } from '../deduplication-helper-functions';

describe('getDeduplicationHelperFunctions', () => {
	const workflow = mock<Workflow>();
	const node = mock<INode>();
	const helperFunctions = getDeduplicationHelperFunctions(workflow, node);

	it('should create helper functions with correct context', () => {
		const expectedMethods = [
			'checkProcessedAndRecord',
			'checkProcessedItemsAndRecord',
			'removeProcessed',
			'clearAllProcessedItems',
			'getProcessedDataCount',
		] as const;

		expectedMethods.forEach((method) => {
			expect(helperFunctions).toHaveProperty(method);
			expect(typeof helperFunctions[method]).toBe('function');
		});
	});
});
@@ -0,0 +1,186 @@
import { Container } from '@n8n/di';
import type { INode } from 'n8n-workflow';
import { createReadStream } from 'node:fs';
import { access as fsAccess } from 'node:fs/promises';
import { join } from 'node:path';

import {
	BINARY_DATA_STORAGE_PATH,
	BLOCK_FILE_ACCESS_TO_N8N_FILES,
	CONFIG_FILES,
	CUSTOM_EXTENSION_ENV,
	RESTRICT_FILE_ACCESS_TO,
	UM_EMAIL_TEMPLATES_INVITE,
	UM_EMAIL_TEMPLATES_PWRESET,
} from '@/constants';
import { InstanceSettings } from '@/instance-settings';

import { getFileSystemHelperFunctions, isFilePathBlocked } from '../file-system-helper-functions';

jest.mock('node:fs');
jest.mock('node:fs/promises');

const originalProcessEnv = { ...process.env };

let instanceSettings: InstanceSettings;
beforeEach(() => {
	process.env = { ...originalProcessEnv };

	const error = new Error('ENOENT');
	// @ts-expect-error undefined property
	error.code = 'ENOENT';
	(fsAccess as jest.Mock).mockRejectedValue(error);

	instanceSettings = Container.get(InstanceSettings);
});

describe('isFilePathBlocked', () => {
	beforeEach(() => {
		process.env[BLOCK_FILE_ACCESS_TO_N8N_FILES] = 'true';
	});

	it('should return true for static cache dir', () => {
		const filePath = instanceSettings.staticCacheDir;
		expect(isFilePathBlocked(filePath)).toBe(true);
	});

	it('should return true for restricted paths', () => {
		const restrictedPath = instanceSettings.n8nFolder;
		expect(isFilePathBlocked(restrictedPath)).toBe(true);
	});

	it('should handle empty allowed paths', () => {
		delete process.env[RESTRICT_FILE_ACCESS_TO];
		const result = isFilePathBlocked('/some/random/path');
		expect(result).toBe(false);
	});

	it('should handle multiple allowed paths', () => {
		process.env[RESTRICT_FILE_ACCESS_TO] = '/path1;/path2;/path3';
		const allowedPath = '/path2/somefile';
		expect(isFilePathBlocked(allowedPath)).toBe(false);
	});

	it('should handle empty strings in allowed paths', () => {
		process.env[RESTRICT_FILE_ACCESS_TO] = '/path1;;/path2';
		const allowedPath = '/path2/somefile';
		expect(isFilePathBlocked(allowedPath)).toBe(false);
	});

	it('should trim whitespace in allowed paths', () => {
		process.env[RESTRICT_FILE_ACCESS_TO] = ' /path1 ; /path2 ; /path3 ';
		const allowedPath = '/path2/somefile';
		expect(isFilePathBlocked(allowedPath)).toBe(false);
	});

	it('should return false when BLOCK_FILE_ACCESS_TO_N8N_FILES is false', () => {
		process.env[BLOCK_FILE_ACCESS_TO_N8N_FILES] = 'false';
		const restrictedPath = instanceSettings.n8nFolder;
		expect(isFilePathBlocked(restrictedPath)).toBe(false);
	});

	it('should return true when path is in allowed paths but still restricted', () => {
		process.env[RESTRICT_FILE_ACCESS_TO] = '/some/allowed/path';
		const restrictedPath = instanceSettings.n8nFolder;
		expect(isFilePathBlocked(restrictedPath)).toBe(true);
	});

	it('should return false when path is in allowed paths', () => {
		const allowedPath = '/some/allowed/path';
		process.env[RESTRICT_FILE_ACCESS_TO] = allowedPath;
		expect(isFilePathBlocked(allowedPath)).toBe(false);
	});

	it('should return true when file paths in CONFIG_FILES', () => {
		process.env[CONFIG_FILES] = '/path/to/config1,/path/to/config2';
		const configPath = '/path/to/config1/somefile';
		expect(isFilePathBlocked(configPath)).toBe(true);
	});

	it('should return true when file paths in CUSTOM_EXTENSION_ENV', () => {
		process.env[CUSTOM_EXTENSION_ENV] = '/path/to/extensions1;/path/to/extensions2';
		const extensionPath = '/path/to/extensions1/somefile';
		expect(isFilePathBlocked(extensionPath)).toBe(true);
	});

	it('should return true when file paths in BINARY_DATA_STORAGE_PATH', () => {
		process.env[BINARY_DATA_STORAGE_PATH] = '/path/to/binary/storage';
		const binaryPath = '/path/to/binary/storage/somefile';
		expect(isFilePathBlocked(binaryPath)).toBe(true);
	});

	it('should block file paths in email template paths', () => {
		process.env[UM_EMAIL_TEMPLATES_INVITE] = '/path/to/invite/templates';
		process.env[UM_EMAIL_TEMPLATES_PWRESET] = '/path/to/pwreset/templates';

		const invitePath = '/path/to/invite/templates/invite.html';
		const pwResetPath = '/path/to/pwreset/templates/reset.html';

		expect(isFilePathBlocked(invitePath)).toBe(true);
		expect(isFilePathBlocked(pwResetPath)).toBe(true);
	});
});

describe('getFileSystemHelperFunctions', () => {
	const node = { type: 'TestNode' } as INode;
	const helperFunctions = getFileSystemHelperFunctions(node);

	it('should create helper functions with correct context', () => {
		const expectedMethods = ['createReadStream', 'getStoragePath', 'writeContentToFile'] as const;

		expectedMethods.forEach((method) => {
			expect(helperFunctions).toHaveProperty(method);
			expect(typeof helperFunctions[method]).toBe('function');
		});
	});

	describe('getStoragePath', () => {
		it('returns correct path', () => {
			const expectedPath = join(instanceSettings.n8nFolder, `storage/${node.type}`);
			expect(helperFunctions.getStoragePath()).toBe(expectedPath);
		});
	});

	describe('createReadStream', () => {
		it('should throw error for non-existent file', async () => {
			const filePath = '/non/existent/file';
			const error = new Error('ENOENT');
			// @ts-expect-error undefined property
			error.code = 'ENOENT';
			(fsAccess as jest.Mock).mockRejectedValueOnce(error);

			await expect(helperFunctions.createReadStream(filePath)).rejects.toThrow(
				`The file "${filePath}" could not be accessed.`,
			);
		});

		it('should throw when file access is blocked', async () => {
			process.env[RESTRICT_FILE_ACCESS_TO] = '/allowed/path';
			(fsAccess as jest.Mock).mockResolvedValueOnce({});
			await expect(helperFunctions.createReadStream('/blocked/path')).rejects.toThrow(
				'Access to the file is not allowed',
			);
		});

		it('should create a read stream if file access is permitted', async () => {
			const filePath = '/allowed/path';
			(fsAccess as jest.Mock).mockResolvedValueOnce({});
			await helperFunctions.createReadStream(filePath);
			expect(createReadStream).toHaveBeenCalledWith(filePath);
		});
	});

	describe('writeContentToFile', () => {
		it('should throw error for blocked file path', async () => {
			process.env[BLOCK_FILE_ACCESS_TO_N8N_FILES] = 'true';

			await expect(
				helperFunctions.writeContentToFile(
					instanceSettings.n8nFolder + '/test.txt',
					'content',
					'w',
				),
			).rejects.toThrow('not writable');
		});
	});
});
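Taken together, the isFilePathBlocked tests above pin down an env-driven policy: a semicolon-separated allowlist (entries trimmed, empty entries ignored), a switch guarding n8n's own files, and several env vars naming additional blocked locations (config files, custom extensions, binary storage, email templates). A condensed sketch of that decision logic, reconstructed from the expectations above rather than copied from the implementation; the env-variable names follow n8n's documented settings but are assumptions here, and n8nOwnedDirs stands in for the instance folder, static cache dir, and the directories named by the other env vars:

// Reconstructed from the test expectations above; not the actual source.
function isFilePathBlockedSketch(filePath: string, n8nOwnedDirs: string[]): boolean {
	const allowed = (process.env.N8N_RESTRICT_FILE_ACCESS_TO ?? '')
		.split(';')
		.map((p) => p.trim())
		.filter(Boolean);
	const insideN8n = n8nOwnedDirs.some((dir) => filePath.startsWith(dir));
	// An allowlisted prefix wins, unless the path is one of n8n's own directories.
	if (allowed.some((prefix) => filePath.startsWith(prefix)) && !insideN8n) return false;
	// With an allowlist present, everything outside it is blocked.
	if (allowed.length > 0) return true;
	// Otherwise only n8n's own directories are blocked, and only while the flag is enabled.
	return process.env.N8N_BLOCK_FILE_ACCESS_TO_N8N_FILES !== 'false' && insideN8n;
}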
@ -0,0 +1,252 @@
import type { IncomingMessage } from 'http';
import { mock } from 'jest-mock-extended';

import {
	parseContentDisposition,
	parseContentType,
	parseIncomingMessage,
} from '../parse-incoming-message';

describe('parseContentType', () => {
	const testCases = [
		{
			input: 'text/plain',
			expected: {
				type: 'text/plain',
				parameters: {
					charset: 'utf-8',
				},
			},
			description: 'should parse basic content type',
		},
		{
			input: 'TEXT/PLAIN',
			expected: {
				type: 'text/plain',
				parameters: {
					charset: 'utf-8',
				},
			},
			description: 'should convert type to lowercase',
		},
		{
			input: 'text/html; charset=iso-8859-1',
			expected: {
				type: 'text/html',
				parameters: {
					charset: 'iso-8859-1',
				},
			},
			description: 'should parse content type with charset',
		},
		{
			input: 'application/json; charset=utf-8; boundary=---123',
			expected: {
				type: 'application/json',
				parameters: {
					charset: 'utf-8',
					boundary: '---123',
				},
			},
			description: 'should parse content type with multiple parameters',
		},
		{
			input: 'text/plain; charset="utf-8"; filename="test.txt"',
			expected: {
				type: 'text/plain',
				parameters: {
					charset: 'utf-8',
					filename: 'test.txt',
				},
			},
			description: 'should handle quoted parameter values',
		},
		{
			input: 'text/plain; filename=%22test%20file.txt%22',
			expected: {
				type: 'text/plain',
				parameters: {
					charset: 'utf-8',
					filename: 'test file.txt',
				},
			},
			description: 'should handle encoded parameter values',
		},
		{
			input: undefined,
			expected: null,
			description: 'should return null for undefined input',
		},
		{
			input: '',
			expected: null,
			description: 'should return null for empty string',
		},
	];

	test.each(testCases)('$description', ({ input, expected }) => {
		expect(parseContentType(input)).toEqual(expected);
	});
});

describe('parseContentDisposition', () => {
	const testCases = [
		{
			input: 'attachment; filename="file.txt"',
			expected: { type: 'attachment', filename: 'file.txt' },
			description: 'should parse basic content disposition',
		},
		{
			input: 'attachment; filename=file.txt',
			expected: { type: 'attachment', filename: 'file.txt' },
			description: 'should parse filename without quotes',
		},
		{
			input: 'inline; filename="image.jpg"',
			expected: { type: 'inline', filename: 'image.jpg' },
			description: 'should parse inline disposition',
		},
		{
			input: 'attachment; filename="my file.pdf"',
			expected: { type: 'attachment', filename: 'my file.pdf' },
			description: 'should parse filename with spaces',
		},
		{
			input: "attachment; filename*=UTF-8''my%20file.txt",
			expected: { type: 'attachment', filename: 'my file.txt' },
			description: 'should parse filename* parameter (RFC 5987)',
		},
		{
			input: 'filename="test.txt"',
			expected: { type: 'attachment', filename: 'test.txt' },
			description: 'should handle invalid syntax but with filename',
		},
		{
			input: 'filename=test.txt',
			expected: { type: 'attachment', filename: 'test.txt' },
			description: 'should handle invalid syntax with only filename parameter',
		},
		{
			input: undefined,
			expected: null,
			description: 'should return null for undefined input',
		},
		{
			input: '',
			expected: null,
			description: 'should return null for empty string',
		},
		{
			input: 'attachment; filename="%F0%9F%98%80.txt"',
			expected: { type: 'attachment', filename: '😀.txt' },
			description: 'should handle encoded filenames',
		},
		{
			input: 'attachment; size=123; filename="test.txt"; creation-date="Thu, 1 Jan 2020"',
			expected: { type: 'attachment', filename: 'test.txt' },
			description: 'should handle multiple parameters',
		},
	];

	test.each(testCases)('$description', ({ input, expected }) => {
		expect(parseContentDisposition(input)).toEqual(expected);
	});
});

describe('parseIncomingMessage', () => {
	it('parses valid content-type header', () => {
		const message = mock<IncomingMessage>({
			headers: { 'content-type': 'application/json', 'content-disposition': undefined },
		});
		parseIncomingMessage(message);

		expect(message.contentType).toEqual('application/json');
	});

	it('parses valid content-type header with parameters', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': 'application/json; charset=utf-8',
				'content-disposition': undefined,
			},
		});
		parseIncomingMessage(message);

		expect(message.contentType).toEqual('application/json');
		expect(message.encoding).toEqual('utf-8');
	});

	it('parses valid content-type header with encoding wrapped in quotes', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': 'application/json; charset="utf-8"',
				'content-disposition': undefined,
			},
		});
		parseIncomingMessage(message);

		expect(message.contentType).toEqual('application/json');
		expect(message.encoding).toEqual('utf-8');
	});

	it('parses valid content-disposition header with filename*', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': undefined,
				'content-disposition':
					'attachment; filename="screenshot%20(1).png"; filename*=UTF-8\'\'screenshot%20(1).png',
			},
		});
		parseIncomingMessage(message);

		expect(message.contentDisposition).toEqual({
			filename: 'screenshot (1).png',
			type: 'attachment',
		});
	});

	it('parses valid content-disposition header with filename* (quoted)', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': undefined,
				'content-disposition': ' attachment;filename*="utf-8\' \'test-unsplash.jpg"',
			},
		});
		parseIncomingMessage(message);

		expect(message.contentDisposition).toEqual({
			filename: 'test-unsplash.jpg',
			type: 'attachment',
		});
	});

	it('parses valid content-disposition header with filename and trailing ";"', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': undefined,
				'content-disposition': 'inline; filename="screenshot%20(1).png";',
			},
		});
		parseIncomingMessage(message);

		expect(message.contentDisposition).toEqual({
			filename: 'screenshot (1).png',
			type: 'inline',
		});
	});

	it('parses non standard content-disposition with missing type', () => {
		const message = mock<IncomingMessage>({
			headers: {
				'content-type': undefined,
				'content-disposition': 'filename="screenshot%20(1).png";',
			},
		});
		parseIncomingMessage(message);

		expect(message.contentDisposition).toEqual({
			filename: 'screenshot (1).png',
			type: 'attachment',
		});
	});
});

@ -0,0 +1,133 @@
import FormData from 'form-data';
import type { Agent } from 'https';
import { mock } from 'jest-mock-extended';
import type { IHttpRequestMethods, IRequestOptions } from 'n8n-workflow';
import type { SecureContextOptions } from 'tls';

import { parseRequestObject } from '../parse-request-object';

describe('parseRequestObject', () => {
	test('should handle basic request options', async () => {
		const axiosOptions = await parseRequestObject({
			url: 'https://example.com',
			method: 'POST',
			headers: { 'content-type': 'application/json' },
			body: { key: 'value' },
		});

		expect(axiosOptions).toEqual(
			expect.objectContaining({
				url: 'https://example.com',
				method: 'POST',
				headers: { accept: '*/*', 'content-type': 'application/json' },
				data: { key: 'value' },
				maxRedirects: 0,
			}),
		);
	});

	test('should set correct headers for FormData', async () => {
		const formData = new FormData();
		formData.append('key', 'value');

		const axiosOptions = await parseRequestObject({
			url: 'https://example.com',
			formData,
			headers: {
				'content-type': 'multipart/form-data',
			},
		});

		expect(axiosOptions.headers).toMatchObject({
			accept: '*/*',
			'content-length': 163,
			'content-type': expect.stringMatching(/^multipart\/form-data; boundary=/),
		});

		expect(axiosOptions.data).toBeInstanceOf(FormData);
	});

	test('should not use Host header for SNI', async () => {
		const axiosOptions = await parseRequestObject({
			url: 'https://example.de/foo/bar',
			headers: { Host: 'other.host.com' },
		});
		expect((axiosOptions.httpsAgent as Agent).options.servername).toEqual('example.de');
	});

	describe('should set SSL certificates', () => {
		const agentOptions: SecureContextOptions = {
			ca: '-----BEGIN CERTIFICATE-----\nTEST\n-----END CERTIFICATE-----',
		};
		const requestObject: IRequestOptions = {
			method: 'GET',
			uri: 'https://example.de',
			agentOptions,
		};

		test('on regular requests', async () => {
			const axiosOptions = await parseRequestObject(requestObject);
			expect((axiosOptions.httpsAgent as Agent).options).toEqual({
				servername: 'example.de',
				...agentOptions,
				noDelay: true,
				path: null,
			});
		});

		test('on redirected requests', async () => {
			const axiosOptions = await parseRequestObject(requestObject);
			expect(axiosOptions.beforeRedirect).toBeDefined();
			// eslint-disable-next-line @typescript-eslint/no-explicit-any
			const redirectOptions: Record<string, any> = { agents: {}, hostname: 'example.de' };
			axiosOptions.beforeRedirect!(redirectOptions, mock());
			expect(redirectOptions.agent).toEqual(redirectOptions.agents.https);
			expect((redirectOptions.agent as Agent).options).toEqual({
				servername: 'example.de',
				...agentOptions,
				noDelay: true,
				path: null,
			});
		});
	});

	describe('when followRedirect is true', () => {
		test.each(['GET', 'HEAD'] as IHttpRequestMethods[])(
			'should set maxRedirects on %s',
			async (method) => {
				const axiosOptions = await parseRequestObject({
					method,
					followRedirect: true,
					maxRedirects: 1234,
				});
				expect(axiosOptions.maxRedirects).toEqual(1234);
			},
		);

		test.each(['POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
			'should not set maxRedirects on %s',
			async (method) => {
				const axiosOptions = await parseRequestObject({
					method,
					followRedirect: true,
					maxRedirects: 1234,
				});
				expect(axiosOptions.maxRedirects).toEqual(0);
			},
		);
	});

	describe('when followAllRedirects is true', () => {
		test.each(['GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE'] as IHttpRequestMethods[])(
			'should set maxRedirects on %s',
			async (method) => {
				const axiosOptions = await parseRequestObject({
					method,
					followAllRedirects: true,
					maxRedirects: 1234,
				});
				expect(axiosOptions.maxRedirects).toEqual(1234);
			},
		);
	});
});

@ -0,0 +1,289 @@
import { Container } from '@n8n/di';
import chardet from 'chardet';
import FileType from 'file-type';
import { IncomingMessage } from 'http';
import iconv from 'iconv-lite';
import { extension, lookup } from 'mime-types';
import type {
	BinaryHelperFunctions,
	IBinaryData,
	INode,
	ITaskDataConnections,
	IWorkflowExecuteAdditionalData,
} from 'n8n-workflow';
import { NodeOperationError, fileTypeFromMimeType, ApplicationError } from 'n8n-workflow';
import path from 'path';
import type { Readable } from 'stream';
import { URL } from 'url';

import { BinaryDataService } from '@/binary-data/binary-data.service';
import type { BinaryData } from '@/binary-data/types';
import { binaryToBuffer } from '@/binary-data/utils';

import { parseIncomingMessage } from './parse-incoming-message';

export async function binaryToString(body: Buffer | Readable, encoding?: string) {
	if (!encoding && body instanceof IncomingMessage) {
		parseIncomingMessage(body);
		encoding = body.encoding;
	}
	const buffer = await binaryToBuffer(body);
	return iconv.decode(buffer, encoding ?? 'utf-8');
}

function getBinaryPath(binaryDataId: string): string {
	return Container.get(BinaryDataService).getPath(binaryDataId);
}

/**
 * Returns binary file metadata
 */
async function getBinaryMetadata(binaryDataId: string): Promise<BinaryData.Metadata> {
	return await Container.get(BinaryDataService).getMetadata(binaryDataId);
}

/**
 * Returns binary file stream for piping
 */
async function getBinaryStream(binaryDataId: string, chunkSize?: number): Promise<Readable> {
	return await Container.get(BinaryDataService).getAsStream(binaryDataId, chunkSize);
}

export function assertBinaryData(
	inputData: ITaskDataConnections,
	node: INode,
	itemIndex: number,
	propertyName: string,
	inputIndex: number,
): IBinaryData {
	const binaryKeyData = inputData.main[inputIndex]![itemIndex].binary;
	if (binaryKeyData === undefined) {
		throw new NodeOperationError(
			node,
			`This operation expects the node's input data to contain a binary file '${propertyName}', but none was found [item ${itemIndex}]`,
			{
				itemIndex,
				description: 'Make sure that the previous node outputs a binary file',
			},
		);
	}

	const binaryPropertyData = binaryKeyData[propertyName];
	if (binaryPropertyData === undefined) {
		throw new NodeOperationError(
			node,
			`The item has no binary field '${propertyName}' [item ${itemIndex}]`,
			{
				itemIndex,
				description:
					'Check that the parameter where you specified the input binary field name is correct, and that it matches a field in the binary input',
			},
		);
	}

	return binaryPropertyData;
}

/**
 * Returns binary data buffer for given item index and property name.
 */
export async function getBinaryDataBuffer(
	inputData: ITaskDataConnections,
	itemIndex: number,
	propertyName: string,
	inputIndex: number,
): Promise<Buffer> {
	const binaryData = inputData.main[inputIndex]![itemIndex].binary![propertyName];
	return await Container.get(BinaryDataService).getAsBuffer(binaryData);
}

export function detectBinaryEncoding(buffer: Buffer): string {
	return chardet.detect(buffer) as string;
}

/**
 * Store an incoming IBinaryData & related buffer using the configured binary data manager.
 *
 * @export
 * @param {IBinaryData} binaryData
 * @param {Buffer | Readable} bufferOrStream
 * @returns {Promise<IBinaryData>}
 */
export async function setBinaryDataBuffer(
	binaryData: IBinaryData,
	bufferOrStream: Buffer | Readable,
	workflowId: string,
	executionId: string,
): Promise<IBinaryData> {
	return await Container.get(BinaryDataService).store(
		workflowId,
		executionId,
		bufferOrStream,
		binaryData,
	);
}

export async function copyBinaryFile(
	workflowId: string,
	executionId: string,
	filePath: string,
	fileName: string,
	mimeType?: string,
): Promise<IBinaryData> {
	let fileExtension: string | undefined;
	if (!mimeType) {
		// If no mime type is given, figure it out

		if (filePath) {
			// Use the file path to guess the mime type
			const mimeTypeLookup = lookup(filePath);
			if (mimeTypeLookup) {
				mimeType = mimeTypeLookup;
			}
		}

		if (!mimeType) {
			// Read the first bytes of the file to guess the mime type
			const fileTypeData = await FileType.fromFile(filePath);
			if (fileTypeData) {
				mimeType = fileTypeData.mime;
				fileExtension = fileTypeData.ext;
			}
		}
	}

	if (!fileExtension && mimeType) {
		fileExtension = extension(mimeType) || undefined;
	}

	if (!mimeType) {
		// Fall back to text
		mimeType = 'text/plain';
	}

	const returnData: IBinaryData = {
		mimeType,
		fileType: fileTypeFromMimeType(mimeType),
		fileExtension,
		data: '',
	};

	if (fileName) {
		returnData.fileName = fileName;
	} else if (filePath) {
		returnData.fileName = path.parse(filePath).base;
	}

	return await Container.get(BinaryDataService).copyBinaryFile(
		workflowId,
		executionId,
		returnData,
		filePath,
	);
}

/**
 * Takes a buffer and converts it into the format n8n uses. It encodes the binary data as
 * base64 and adds metadata.
 */
// eslint-disable-next-line complexity
export async function prepareBinaryData(
	binaryData: Buffer | Readable,
	executionId: string,
	workflowId: string,
	filePath?: string,
	mimeType?: string,
): Promise<IBinaryData> {
	let fileExtension: string | undefined;
	if (binaryData instanceof IncomingMessage) {
		if (!filePath) {
			try {
				const { responseUrl } = binaryData;
				filePath =
					binaryData.contentDisposition?.filename ??
					((responseUrl && new URL(responseUrl).pathname) ?? binaryData.req?.path)?.slice(1);
			} catch {}
		}
		if (!mimeType) {
			mimeType = binaryData.contentType;
		}
	}

	if (!mimeType) {
		// If no mime type is given, figure it out

		if (filePath) {
			// Use the file path to guess the mime type
			const mimeTypeLookup = lookup(filePath);
			if (mimeTypeLookup) {
				mimeType = mimeTypeLookup;
			}
		}

		if (!mimeType) {
			if (Buffer.isBuffer(binaryData)) {
				// Use the buffer to guess the mime type
				const fileTypeData = await FileType.fromBuffer(binaryData);
				if (fileTypeData) {
					mimeType = fileTypeData.mime;
					fileExtension = fileTypeData.ext;
				}
			} else if (binaryData instanceof IncomingMessage) {
				mimeType = binaryData.headers['content-type'];
			} else {
				// TODO: detect filetype from other kinds of streams
			}
		}
	}

	if (!fileExtension && mimeType) {
		fileExtension = extension(mimeType) || undefined;
	}

	if (!mimeType) {
		// Fall back to text
		mimeType = 'text/plain';
	}

	const returnData: IBinaryData = {
		mimeType,
		fileType: fileTypeFromMimeType(mimeType),
		fileExtension,
		data: '',
	};

	if (filePath) {
		const filePathParts = path.parse(filePath);

		if (filePathParts.dir !== '') {
			returnData.directory = filePathParts.dir;
		}
		returnData.fileName = filePathParts.base;

		// Remove the dot
		fileExtension = filePathParts.ext.slice(1);
		if (fileExtension) {
			returnData.fileExtension = fileExtension;
		}
	}

	return await setBinaryDataBuffer(returnData, binaryData, workflowId, executionId);
}

export const getBinaryHelperFunctions = (
	{ executionId }: IWorkflowExecuteAdditionalData,
	workflowId: string,
): BinaryHelperFunctions => ({
	getBinaryPath,
	getBinaryStream,
	getBinaryMetadata,
	binaryToBuffer,
	binaryToString,
	prepareBinaryData: async (binaryData, filePath, mimeType) =>
		await prepareBinaryData(binaryData, executionId!, workflowId, filePath, mimeType),
	setBinaryDataBuffer: async (data, binaryData) =>
		await setBinaryDataBuffer(data, binaryData, workflowId, executionId!),
	copyBinaryFile: async () => {
		throw new ApplicationError('`copyBinaryFile` has been removed. Please upgrade this node.');
	},
});

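For orientation, a minimal sketch of how these helpers are typically consumed from a node's execute context — the surrounding function and file name are assumptions for illustration, not part of the diff:

// Hypothetical node code; assumes `this.helpers` was built with
// getBinaryHelperFunctions(additionalData, workflowId) as above.
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

async function attachPdf(this: IExecuteFunctions, pdf: Buffer): Promise<INodeExecutionData> {
	// prepareBinaryData() guesses the mime type ('application/pdf' here from the
	// file name), stores the payload via the configured BinaryDataService, and
	// returns the IBinaryData metadata to put on the item.
	const binary = await this.helpers.prepareBinaryData(pdf, 'report.pdf');
	return { json: {}, binary: { data: binary } };
}
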
@ -0,0 +1,128 @@
import type {
	IDataObject,
	INode,
	Workflow,
	DeduplicationHelperFunctions,
	IDeduplicationOutput,
	IDeduplicationOutputItems,
	ICheckProcessedOptions,
	DeduplicationScope,
	DeduplicationItemTypes,
	ICheckProcessedContextData,
} from 'n8n-workflow';

import { DataDeduplicationService } from '@/data-deduplication-service';

async function checkProcessedAndRecord(
	items: DeduplicationItemTypes[],
	scope: DeduplicationScope,
	contextData: ICheckProcessedContextData,
	options: ICheckProcessedOptions,
): Promise<IDeduplicationOutput> {
	return await DataDeduplicationService.getInstance().checkProcessedAndRecord(
		items,
		scope,
		contextData,
		options,
	);
}

async function checkProcessedItemsAndRecord(
	key: string,
	items: IDataObject[],
	scope: DeduplicationScope,
	contextData: ICheckProcessedContextData,
	options: ICheckProcessedOptions,
): Promise<IDeduplicationOutputItems> {
	return await DataDeduplicationService.getInstance().checkProcessedItemsAndRecord(
		key,
		items,
		scope,
		contextData,
		options,
	);
}

async function removeProcessed(
	items: DeduplicationItemTypes[],
	scope: DeduplicationScope,
	contextData: ICheckProcessedContextData,
	options: ICheckProcessedOptions,
): Promise<void> {
	return await DataDeduplicationService.getInstance().removeProcessed(
		items,
		scope,
		contextData,
		options,
	);
}

async function clearAllProcessedItems(
	scope: DeduplicationScope,
	contextData: ICheckProcessedContextData,
	options: ICheckProcessedOptions,
): Promise<void> {
	return await DataDeduplicationService.getInstance().clearAllProcessedItems(
		scope,
		contextData,
		options,
	);
}

async function getProcessedDataCount(
	scope: DeduplicationScope,
	contextData: ICheckProcessedContextData,
	options: ICheckProcessedOptions,
): Promise<number> {
	return await DataDeduplicationService.getInstance().getProcessedDataCount(
		scope,
		contextData,
		options,
	);
}

export const getDeduplicationHelperFunctions = (
	workflow: Workflow,
	node: INode,
): DeduplicationHelperFunctions => ({
	async checkProcessedAndRecord(
		items: DeduplicationItemTypes[],
		scope: DeduplicationScope,
		options: ICheckProcessedOptions,
	): Promise<IDeduplicationOutput> {
		return await checkProcessedAndRecord(items, scope, { node, workflow }, options);
	},
	async checkProcessedItemsAndRecord(
		propertyName: string,
		items: IDataObject[],
		scope: DeduplicationScope,
		options: ICheckProcessedOptions,
	): Promise<IDeduplicationOutputItems> {
		return await checkProcessedItemsAndRecord(
			propertyName,
			items,
			scope,
			{ node, workflow },
			options,
		);
	},
	async removeProcessed(
		items: DeduplicationItemTypes[],
		scope: DeduplicationScope,
		options: ICheckProcessedOptions,
	): Promise<void> {
		return await removeProcessed(items, scope, { node, workflow }, options);
	},
	async clearAllProcessedItems(
		scope: DeduplicationScope,
		options: ICheckProcessedOptions,
	): Promise<void> {
		return await clearAllProcessedItems(scope, { node, workflow }, options);
	},
	async getProcessedDataCount(
		scope: DeduplicationScope,
		options: ICheckProcessedOptions,
	): Promise<number> {
		return await getProcessedDataCount(scope, { node, workflow }, options);
	},
});

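A hedged sketch of how a node might use these helpers to skip already-seen items; the function, IDs, and the `new` result field are assumptions for illustration (the output field names are not shown in this diff):

// Hypothetical usage from a context wired up with
// getDeduplicationHelperFunctions(workflow, node).
import type { DeduplicationHelperFunctions, ICheckProcessedOptions } from 'n8n-workflow';

async function onlyNewOrders(
	helpers: DeduplicationHelperFunctions,
	orderIds: string[],
	options: ICheckProcessedOptions,
) {
	// Records the IDs and reports which ones were seen before, scoped to this node.
	const result = await helpers.checkProcessedAndRecord(orderIds, 'node', options);
	// 'new' is assumed to be the field holding previously unseen items.
	return result.new;
}
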
@ -0,0 +1,121 @@
import { Container } from '@n8n/di';
import type { FileSystemHelperFunctions, INode } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';
import { createReadStream } from 'node:fs';
import { access as fsAccess, writeFile as fsWriteFile } from 'node:fs/promises';
import { join, resolve } from 'node:path';

import {
	BINARY_DATA_STORAGE_PATH,
	BLOCK_FILE_ACCESS_TO_N8N_FILES,
	CONFIG_FILES,
	CUSTOM_EXTENSION_ENV,
	RESTRICT_FILE_ACCESS_TO,
	UM_EMAIL_TEMPLATES_INVITE,
	UM_EMAIL_TEMPLATES_PWRESET,
} from '@/constants';
import { InstanceSettings } from '@/instance-settings';

const getAllowedPaths = () => {
	const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO];
	if (!restrictFileAccessTo) {
		return [];
	}
	const allowedPaths = restrictFileAccessTo
		.split(';')
		.map((path) => path.trim())
		.filter((path) => path);
	return allowedPaths;
};

export function isFilePathBlocked(filePath: string): boolean {
	const allowedPaths = getAllowedPaths();
	const resolvedFilePath = resolve(filePath);
	const blockFileAccessToN8nFiles = process.env[BLOCK_FILE_ACCESS_TO_N8N_FILES] !== 'false';

	// If allowed paths are defined, allow access only to those paths
	if (allowedPaths.length) {
		for (const path of allowedPaths) {
			if (resolvedFilePath.startsWith(path)) {
				return false;
			}
		}

		return true;
	}

	// Restrict access to the .n8n folder, ~/.cache/n8n/public, and other .env config related paths
	if (blockFileAccessToN8nFiles) {
		const { n8nFolder, staticCacheDir } = Container.get(InstanceSettings);
		const restrictedPaths = [n8nFolder, staticCacheDir];

		if (process.env[CONFIG_FILES]) {
			restrictedPaths.push(...process.env[CONFIG_FILES].split(','));
		}

		if (process.env[CUSTOM_EXTENSION_ENV]) {
			const customExtensionFolders = process.env[CUSTOM_EXTENSION_ENV].split(';');
			restrictedPaths.push(...customExtensionFolders);
		}

		if (process.env[BINARY_DATA_STORAGE_PATH]) {
			restrictedPaths.push(process.env[BINARY_DATA_STORAGE_PATH]);
		}

		if (process.env[UM_EMAIL_TEMPLATES_INVITE]) {
			restrictedPaths.push(process.env[UM_EMAIL_TEMPLATES_INVITE]);
		}

		if (process.env[UM_EMAIL_TEMPLATES_PWRESET]) {
			restrictedPaths.push(process.env[UM_EMAIL_TEMPLATES_PWRESET]);
		}

		// Check if the file path is restricted
		for (const path of restrictedPaths) {
			if (resolvedFilePath.startsWith(path)) {
				return true;
			}
		}
	}

	// Path is not restricted
	return false;
}

export const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => ({
	async createReadStream(filePath) {
		try {
			await fsAccess(filePath);
		} catch (error) {
			// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
			throw error.code === 'ENOENT'
				? // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
					new NodeOperationError(node, error, {
						message: `The file "${String(filePath)}" could not be accessed.`,
						level: 'warning',
					})
				: error;
		}
		if (isFilePathBlocked(filePath as string)) {
			const allowedPaths = getAllowedPaths();
			const message = allowedPaths.length ? ` Allowed paths: ${allowedPaths.join(', ')}` : '';
			throw new NodeOperationError(node, `Access to the file is not allowed.${message}`, {
				level: 'warning',
			});
		}
		return createReadStream(filePath);
	},

	getStoragePath() {
		return join(Container.get(InstanceSettings).n8nFolder, `storage/${node.type}`);
	},

	async writeContentToFile(filePath, content, flag) {
		if (isFilePathBlocked(filePath as string)) {
			throw new NodeOperationError(node, `The file "${String(filePath)}" is not writable.`, {
				level: 'warning',
			});
		}
		return await fsWriteFile(filePath, content, { encoding: 'binary', flag });
	},
});

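A short behaviour sketch of isFilePathBlocked(), using illustrative paths and env values (not part of the commit):

// Allow-list mode: only paths under the configured prefixes are permitted.
process.env[RESTRICT_FILE_ACCESS_TO] = '/data/allowed';
isFilePathBlocked('/data/allowed/report.csv'); // → false (inside the allow-list)
isFilePathBlocked('/etc/passwd'); // → true (allow-list set, path outside it)

delete process.env[RESTRICT_FILE_ACCESS_TO];
// With no allow-list, only n8n's own folders (n8nFolder, staticCacheDir, and the
// configured config/extension/binary-data/email-template paths) are blocked,
// unless BLOCK_FILE_ACCESS_TO_N8N_FILES is explicitly set to 'false'.
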
@ -0,0 +1,95 @@
import type { IncomingMessage } from 'http';

function parseHeaderParameters(parameters: string[]): Record<string, string> {
	return parameters.reduce(
		(acc, param) => {
			const [key, value] = param.split('=');
			let decodedValue = decodeURIComponent(value).trim();
			if (decodedValue.startsWith('"') && decodedValue.endsWith('"')) {
				decodedValue = decodedValue.slice(1, -1);
			}
			acc[key.toLowerCase().trim()] = decodedValue;
			return acc;
		},
		{} as Record<string, string>,
	);
}

interface IContentType {
	type: string;
	parameters: {
		charset: string;
		[key: string]: string;
	};
}

/**
 * Parses the Content-Type header string into a structured object
 * @returns {IContentType | null} Parsed content type details or null if no content type is detected
 */
export const parseContentType = (contentType?: string): IContentType | null => {
	if (!contentType) {
		return null;
	}

	const [type, ...parameters] = contentType.split(';');

	return {
		type: type.toLowerCase(),
		parameters: { charset: 'utf-8', ...parseHeaderParameters(parameters) },
	};
};

interface IContentDisposition {
	type: string;
	filename?: string;
}

/**
 * Parses the Content-Disposition header string into a structured object
 * @returns {IContentDisposition | null} Parsed content disposition details or null if no content disposition is detected
 */
export const parseContentDisposition = (
	contentDisposition?: string,
): IContentDisposition | null => {
	if (!contentDisposition) {
		return null;
	}

	// This is invalid syntax, but common
	// Example 'filename="example.png"' (instead of 'attachment; filename="example.png"')
	if (!contentDisposition.startsWith('attachment') && !contentDisposition.startsWith('inline')) {
		contentDisposition = `attachment; ${contentDisposition}`;
	}

	const [type, ...parameters] = contentDisposition.split(';');

	const parsedParameters = parseHeaderParameters(parameters);

	let { filename } = parsedParameters;
	const wildcard = parsedParameters['filename*'];
	if (wildcard) {
		// https://datatracker.ietf.org/doc/html/rfc5987
		const [_encoding, _locale, content] = wildcard?.split("'") ?? [];
		filename = content;
	}

	return { type, filename };
};

/**
 * Augments an IncomingMessage with parsed content type and disposition information
 */
export function parseIncomingMessage(message: IncomingMessage) {
	const contentType = parseContentType(message.headers['content-type']);
	if (contentType) {
		const { type, parameters } = contentType;
		message.contentType = type;
		message.encoding = parameters.charset.toLowerCase() as BufferEncoding;
	}

	const contentDisposition = parseContentDisposition(message.headers['content-disposition']);
	if (contentDisposition) {
		message.contentDisposition = contentDisposition;
	}
}

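Worked examples of the two parsers above, with illustrative inputs, follow directly from the code:

parseContentType('text/html; charset=ISO-8859-1');
// → { type: 'text/html', parameters: { charset: 'ISO-8859-1' } }
// (the type is lowercased; the default 'utf-8' charset is overridden)

parseContentDisposition("attachment; filename*=UTF-8''na%C3%AFve.txt");
// → { type: 'attachment', filename: 'naïve.txt' }
// (the RFC 5987 filename* value is percent-decoded and the encoding prefix stripped)
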
@ -0,0 +1,468 @@
import { Container } from '@n8n/di';
import type { AxiosHeaders, AxiosRequestConfig } from 'axios';
import crypto from 'crypto';
import FormData from 'form-data';
import { Agent, type AgentOptions } from 'https';
import type { GenericValue, IRequestOptions } from 'n8n-workflow';
import { stringify } from 'qs';
import { URL } from 'url';

import { Logger } from '@/logging/logger';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
const pushFormDataValue = (form: FormData, key: string, value: any) => {
	// eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
	if (value?.hasOwnProperty('value') && value.hasOwnProperty('options')) {
		// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-argument
		form.append(key, value.value, value.options);
	} else {
		form.append(key, value);
	}
};

const createFormDataObject = (data: Record<string, unknown>) => {
	const formData = new FormData();
	const keys = Object.keys(data);
	keys.forEach((key) => {
		const formField = data[key];

		if (formField instanceof Array) {
			formField.forEach((item) => {
				pushFormDataValue(formData, key, item);
			});
		} else {
			pushFormDataValue(formData, key, formField);
		}
	});
	return formData;
};

function searchForHeader(config: AxiosRequestConfig, headerName: string) {
	if (config.headers === undefined) {
		return undefined;
	}

	const headerNames = Object.keys(config.headers);
	headerName = headerName.toLowerCase();
	return headerNames.find((thisHeader) => thisHeader.toLowerCase() === headerName);
}

async function generateContentLengthHeader(config: AxiosRequestConfig) {
	if (!(config.data instanceof FormData)) {
		return;
	}
	try {
		const length = await new Promise<number>((res, rej) => {
			// eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
			config.data.getLength((error: Error | null, dataLength: number) => {
				if (error) rej(error);
				else res(dataLength);
			});
		});
		config.headers = {
			...config.headers,
			'content-length': length,
		};
	} catch (error) {
		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
		Container.get(Logger).error('Unable to calculate form data length', { error });
	}
}

const getHostFromRequestObject = (
	requestObject: Partial<{
		url: string;
		uri: string;
		baseURL: string;
	}>,
): string | null => {
	try {
		const url = (requestObject.url ?? requestObject.uri) as string;
		return new URL(url, requestObject.baseURL).hostname;
	} catch (error) {
		return null;
	}
};

const getBeforeRedirectFn =
	(agentOptions: AgentOptions, axiosConfig: AxiosRequestConfig) =>
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	(redirectedRequest: Record<string, any>) => {
		const redirectAgent = new Agent({
			...agentOptions,
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
			servername: redirectedRequest.hostname,
		});
		redirectedRequest.agent = redirectAgent;
		// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
		redirectedRequest.agents.https = redirectAgent;

		if (axiosConfig.headers?.Authorization) {
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
			redirectedRequest.headers.Authorization = axiosConfig.headers.Authorization;
		}
		if (axiosConfig.auth) {
			redirectedRequest.auth = `${axiosConfig.auth.username}:${axiosConfig.auth.password}`;
		}
	};

/**
 * This function is a temporary implementation that translates all http requests
 * made via the request library to axios directly.
 * We are not using n8n's interface as that would be an unnecessary step,
 * considering the `request` helper has been deprecated and should be removed.
 * @deprecated This is only used by legacy request helpers, which are also deprecated
 */
// eslint-disable-next-line complexity
export async function parseRequestObject(requestObject: IRequestOptions) {
	const axiosConfig: AxiosRequestConfig = {};

	if (requestObject.headers !== undefined) {
		axiosConfig.headers = requestObject.headers as AxiosHeaders;
	}

	// Let's start parsing the hardest part, which is the request body.
	// The process here is as follows:
	// - Check if we have a `content-type` header. If one was set, we follow it.
	// - Check if the `form` property was set. If yes, then it's x-www-form-urlencoded.
	// - Check if the `formData` property exists. If yes, then it's multipart/form-data.
	// - Lastly, we should have a regular `body` that is probably JSON.

	const contentTypeHeaderKeyName =
		axiosConfig.headers &&
		Object.keys(axiosConfig.headers).find(
			(headerName) => headerName.toLowerCase() === 'content-type',
		);
	const contentType =
		contentTypeHeaderKeyName &&
		(axiosConfig.headers?.[contentTypeHeaderKeyName] as string | undefined);
	if (contentType === 'application/x-www-form-urlencoded' && requestObject.formData === undefined) {
		// There are nodes that were created incorrectly, setting the content-type header
		// while also using formData. The request lib gives precedence to the formData,
		// so we will do the same.
		// Merge body and form properties.
		if (typeof requestObject.body === 'string') {
			axiosConfig.data = requestObject.body;
		} else {
			// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
			const allData = Object.assign(requestObject.body || {}, requestObject.form || {}) as Record<
				string,
				string
			>;
			if (requestObject.useQuerystring === true) {
				axiosConfig.data = stringify(allData, { arrayFormat: 'repeat' });
			} else {
				axiosConfig.data = stringify(allData);
			}
		}
	} else if (contentType?.includes('multipart/form-data')) {
		if (requestObject.formData !== undefined && requestObject.formData instanceof FormData) {
			axiosConfig.data = requestObject.formData;
		} else {
			const allData: Partial<FormData> = {
				...(requestObject.body as object | undefined),
				...(requestObject.formData as object | undefined),
			};

			axiosConfig.data = createFormDataObject(allData);
		}
		// Replace the existing header with a new one that
		// contains the boundary property.
		delete axiosConfig.headers?.[contentTypeHeaderKeyName!];
		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
		const headers = axiosConfig.data.getHeaders();
		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/prefer-nullish-coalescing
		axiosConfig.headers = Object.assign(axiosConfig.headers || {}, headers);
		await generateContentLengthHeader(axiosConfig);
	} else {
		// When using the `form` property it means the content should be x-www-form-urlencoded.
		if (requestObject.form !== undefined && requestObject.body === undefined) {
			// If we have only form
			axiosConfig.data =
				typeof requestObject.form === 'string'
					? stringify(requestObject.form, { format: 'RFC3986' })
					: stringify(requestObject.form).toString();
			if (axiosConfig.headers !== undefined) {
				const headerName = searchForHeader(axiosConfig, 'content-type');
				if (headerName) {
					delete axiosConfig.headers[headerName];
				}
				axiosConfig.headers['Content-Type'] = 'application/x-www-form-urlencoded';
			} else {
				axiosConfig.headers = {
					'Content-Type': 'application/x-www-form-urlencoded',
				};
			}
		} else if (requestObject.formData !== undefined) {
			// Remove any "content-type" header that might exist.
			if (axiosConfig.headers !== undefined) {
				const headers = Object.keys(axiosConfig.headers);
				headers.forEach((header) => {
					if (header.toLowerCase() === 'content-type') {
						delete axiosConfig.headers?.[header];
					}
				});
			}

			if (requestObject.formData instanceof FormData) {
				axiosConfig.data = requestObject.formData;
			} else {
				axiosConfig.data = createFormDataObject(requestObject.formData as Record<string, unknown>);
			}
			// Mix in headers as FormData creates the boundary.
			// eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
			const headers = axiosConfig.data.getHeaders();
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/prefer-nullish-coalescing
			axiosConfig.headers = Object.assign(axiosConfig.headers || {}, headers);
			await generateContentLengthHeader(axiosConfig);
		} else if (requestObject.body !== undefined) {
			// If we have a body and possibly a form
			if (requestObject.form !== undefined && requestObject.body) {
				// Merge both objects when they exist.
				// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
				requestObject.body = Object.assign(requestObject.body, requestObject.form);
			}
			axiosConfig.data = requestObject.body as FormData | GenericValue | GenericValue[];
		}
	}

	if (requestObject.uri !== undefined) {
		axiosConfig.url = requestObject.uri?.toString();
	}

	if (requestObject.url !== undefined) {
		axiosConfig.url = requestObject.url?.toString();
	}

	if (requestObject.baseURL !== undefined) {
		axiosConfig.baseURL = requestObject.baseURL?.toString();
	}

	if (requestObject.method !== undefined) {
		axiosConfig.method = requestObject.method;
	}

	if (requestObject.qs !== undefined && Object.keys(requestObject.qs as object).length > 0) {
		axiosConfig.params = requestObject.qs;
	}

	function hasArrayFormatOptions(
		arg: IRequestOptions,
	): arg is Required<Pick<IRequestOptions, 'qsStringifyOptions'>> {
		if (
			typeof arg.qsStringifyOptions === 'object' &&
			arg.qsStringifyOptions !== null &&
			!Array.isArray(arg.qsStringifyOptions) &&
			'arrayFormat' in arg.qsStringifyOptions
		) {
			return true;
		}

		return false;
	}

	if (
		requestObject.useQuerystring === true ||
		(hasArrayFormatOptions(requestObject) &&
			requestObject.qsStringifyOptions.arrayFormat === 'repeat')
	) {
		axiosConfig.paramsSerializer = (params) => {
			return stringify(params, { arrayFormat: 'repeat' });
		};
	} else if (requestObject.useQuerystring === false) {
		axiosConfig.paramsSerializer = (params) => {
			return stringify(params, { arrayFormat: 'indices' });
		};
	}

	if (
		hasArrayFormatOptions(requestObject) &&
		requestObject.qsStringifyOptions.arrayFormat === 'brackets'
	) {
		axiosConfig.paramsSerializer = (params) => {
			return stringify(params, { arrayFormat: 'brackets' });
		};
	}

	if (requestObject.auth !== undefined) {
		// Check support for sendImmediately
		if (requestObject.auth.bearer !== undefined) {
			// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
			axiosConfig.headers = Object.assign(axiosConfig.headers || {}, {
				Authorization: `Bearer ${requestObject.auth.bearer}`,
			});
		} else {
			const authObj = requestObject.auth;
			// Request accepts both user/username and pass/password
			axiosConfig.auth = {
				// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
				username: (authObj.user || authObj.username) as string,
				// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
				password: (authObj.password || authObj.pass) as string,
			};
		}
	}

	// Only set the header if we have a body, otherwise it may fail
	if (requestObject.json === true) {
		// Add application/json headers - do not set charset as it breaks a lot of stuff.
		// Only add if no other accept header was sent.
		const acceptHeaderExists =
			axiosConfig.headers === undefined
				? false
				: Object.keys(axiosConfig.headers)
						.map((headerKey) => headerKey.toLowerCase())
						.includes('accept');
		if (!acceptHeaderExists) {
			// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
			axiosConfig.headers = Object.assign(axiosConfig.headers || {}, {
				Accept: 'application/json',
			});
		}
	}
	if (requestObject.json === false || requestObject.json === undefined) {
		// Prevent JSON parsing
		// eslint-disable-next-line @typescript-eslint/no-unsafe-return
		axiosConfig.transformResponse = (res) => res;
	}

	// Axios will follow redirects by default, so we simply tell it otherwise if needed.
	const { method } = requestObject;
	if (
		(requestObject.followRedirect !== false &&
			(!method || method === 'GET' || method === 'HEAD')) ||
		requestObject.followAllRedirects
	) {
		axiosConfig.maxRedirects = requestObject.maxRedirects;
	} else {
		axiosConfig.maxRedirects = 0;
	}

	const host = getHostFromRequestObject(requestObject);
	const agentOptions: AgentOptions = { ...requestObject.agentOptions };
	if (host) {
		agentOptions.servername = host;
	}
	if (requestObject.rejectUnauthorized === false) {
		agentOptions.rejectUnauthorized = false;
		agentOptions.secureOptions = crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT;
	}

	axiosConfig.httpsAgent = new Agent(agentOptions);

	axiosConfig.beforeRedirect = getBeforeRedirectFn(agentOptions, axiosConfig);

	if (requestObject.timeout !== undefined) {
		axiosConfig.timeout = requestObject.timeout;
	}

	if (requestObject.proxy !== undefined) {
		// Try our best to parse the provided URL.
		if (typeof requestObject.proxy === 'string') {
			try {
				const url = new URL(requestObject.proxy);
				// eslint-disable-next-line @typescript-eslint/no-shadow
				const host = url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname;
				axiosConfig.proxy = {
					host,
					port: parseInt(url.port, 10),
					protocol: url.protocol,
				};
				if (!url.port) {
					// Set the port to a default if not informed
					if (url.protocol === 'http') {
						axiosConfig.proxy.port = 80;
					} else if (url.protocol === 'https') {
						axiosConfig.proxy.port = 443;
					}
				}
				if (url.username || url.password) {
					axiosConfig.proxy.auth = {
						username: url.username,
						password: url.password,
					};
				}
			} catch (error) {
				// Not a valid URL. We will try to simply parse stuff
				// such as user:pass@host:port without protocol (we'll assume http)
				if (requestObject.proxy.includes('@')) {
					const [userpass, hostport] = requestObject.proxy.split('@');
					const [username, password] = userpass.split(':');
					const [hostname, port] = hostport.split(':');
					// eslint-disable-next-line @typescript-eslint/no-shadow
					const host = hostname.startsWith('[') ? hostname.slice(1, -1) : hostname;
					axiosConfig.proxy = {
						host,
						port: parseInt(port, 10),
						protocol: 'http',
						auth: {
							username,
							password,
						},
					};
				} else if (requestObject.proxy.includes(':')) {
					const [hostname, port] = requestObject.proxy.split(':');
					axiosConfig.proxy = {
						host: hostname,
						port: parseInt(port, 10),
						protocol: 'http',
					};
				} else {
					axiosConfig.proxy = {
						host: requestObject.proxy,
						port: 80,
						protocol: 'http',
					};
				}
			}
		} else {
			axiosConfig.proxy = requestObject.proxy;
		}
	}

	if (requestObject.useStream) {
		axiosConfig.responseType = 'stream';
	} else if (requestObject.encoding === null) {
		// When downloading files, return an arrayBuffer.
		axiosConfig.responseType = 'arraybuffer';
	}

	// If we don't set an accept header,
	// Axios forces "application/json, text/plain, */*",
	// which causes some nodes like NextCloud to break,
	// as the service returns XML unless requested otherwise.
	const allHeaders = axiosConfig.headers ? Object.keys(axiosConfig.headers) : [];
	if (!allHeaders.some((headerKey) => headerKey.toLowerCase() === 'accept')) {
		// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
		axiosConfig.headers = Object.assign(axiosConfig.headers || {}, { accept: '*/*' });
	}
	if (
		requestObject.json !== false &&
		axiosConfig.data !== undefined &&
		axiosConfig.data !== '' &&
		!(axiosConfig.data instanceof Buffer) &&
		!allHeaders.some((headerKey) => headerKey.toLowerCase() === 'content-type')
	) {
		// Use the default header for application/json.
		// If we don't specify this here, axios will add
		// "application/json; charset=utf-8",
		// and this breaks a lot of stuff.
		// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
		axiosConfig.headers = Object.assign(axiosConfig.headers || {}, {
			'content-type': 'application/json',
		});
	}

	if (requestObject.simple === false) {
		axiosConfig.validateStatus = () => true;
	}

	/**
	 * Missing properties:
	 * encoding (needs testing)
	 * gzip (ignored - default already works)
	 * resolveWithFullResponse (implemented elsewhere)
	 */
	return axiosConfig;
}

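A hedged sketch of what the translation produces for a typical legacy request-style options object; the URL and token are illustrative, and the expected values follow from the branches above:

import { parseRequestObject } from './parse-request-object';

async function demo() {
	const axiosConfig = await parseRequestObject({
		method: 'POST',
		uri: 'https://api.example.com/items',
		json: true,
		body: { name: 'test' },
		auth: { bearer: 'token123' },
	});
	// axiosConfig.method → 'POST', axiosConfig.url → 'https://api.example.com/items'
	// axiosConfig.headers → { Authorization: 'Bearer token123',
	//                         Accept: 'application/json' (added because json: true),
	//                         'content-type': 'application/json' (added because data is set) }
	// axiosConfig.maxRedirects → 0 (non-GET/HEAD without followAllRedirects)
	return axiosConfig;
}
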
@ -20,14 +20,13 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
	copyBinaryFile,
	getBinaryHelperFunctions,
	getNodeWebhookUrl,
	getRequestHelperFunctions,
	returnJsonArray,
} from '@/node-execute-functions';

import { NodeExecutionContext } from './node-execution-context';
import { copyBinaryFile, getBinaryHelperFunctions } from './utils/binary-helper-functions';
import { getInputConnectionData } from './utils/get-input-connection-data';

export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {

@ -5,7 +5,6 @@ import type {
	INode,
	INodeExecutionData,
	IPollFunctions,
	IGetExecuteTriggerFunctions,
	IWorkflowExecuteAdditionalData,
	WorkflowExecuteMode,
	WorkflowActivateMode,

@ -15,6 +14,8 @@ import type {
	IRun,
} from 'n8n-workflow';

import type { IGetExecuteTriggerFunctions } from './interfaces';

@Service()
export class TriggersAndPollers {
	/**

File diff suppressed because it is too large
@ -398,26 +398,6 @@ export interface INodeTypeNameVersion {
	version: number;
}

export interface IGetExecutePollFunctions {
	(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): IPollFunctions;
}

export interface IGetExecuteTriggerFunctions {
	(
		workflow: Workflow,
		node: INode,
		additionalData: IWorkflowExecuteAdditionalData,
		mode: WorkflowExecuteMode,
		activation: WorkflowActivateMode,
	): ITriggerFunctions;
}

export interface IRunNodeResponse {
	data: INodeExecutionData[][] | NodeExecutionOutput | null | undefined;
	closeFunction?: CloseFunction;

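These removed call-signature interfaces describe factory functions that now live alongside the contexts in n8n-core. As a purely illustrative sketch of a conforming value — assuming PollContext's constructor mirrors these parameters, as the n8n-core imports in this commit suggest:

// Illustrative only; the real factory is provided by n8n-core.
const getExecutePollFunctions: IGetExecutePollFunctions = (
	workflow,
	node,
	additionalData,
	mode,
	activation,
) => new PollContext(workflow, node, additionalData, mode, activation);
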
@ -759,7 +739,7 @@ export interface DeduplicationHelperFunctions {
		options: ICheckProcessedOptions,
	): Promise<number>;
}
export interface NodeHelperFunctions {
interface NodeHelperFunctions {
	copyBinaryFile(filePath: string, fileName: string, mimeType?: string): Promise<IBinaryData>;
}

@ -1196,11 +1176,6 @@ export interface INodeExecutionData {
	index?: number;
}

export interface INodeExecuteFunctions {
	getExecutePollFunctions: IGetExecutePollFunctions;
	getExecuteTriggerFunctions: IGetExecuteTriggerFunctions;
}

export type NodeParameterValue = string | number | boolean | undefined | null;

export type ResourceLocatorModes = 'id' | 'url' | 'list' | string;