Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-09 22:24:05 -08:00)
feat(core): Improvements/overhaul for nodes working with binary data (#7651)
Github issue / Community forum post (link here to close automatically):
---------
Co-authored-by: Giulio Andreini <andreini@netseven.it>
Co-authored-by: Marcus <marcus@n8n.io>
This commit is contained in:
parent 259323b97e
commit 5e16dd4ab4
|
@ -187,12 +187,14 @@ describe('Webhook Trigger node', async () => {
|
|||
|
||||
ndv.getters.backToCanvas().click();
|
||||
|
||||
workflowPage.actions.addNodeToCanvas('Convert to/from binary data');
|
||||
workflowPage.actions.addNodeToCanvas('Convert to File');
|
||||
workflowPage.actions.zoomToFit();
|
||||
|
||||
workflowPage.actions.openNode('Convert to/from binary data');
|
||||
workflowPage.actions.openNode('Convert to File');
|
||||
cy.getByTestId('parameter-input-operation').click();
|
||||
getVisibleSelect().find('.option-headline').contains('Convert to JSON').click();
|
||||
cy.getByTestId('parameter-input-mode').click();
|
||||
getVisibleSelect().find('.option-headline').contains('JSON to Binary').click();
|
||||
getVisibleSelect().find('.option-headline').contains('Each Item to Separate File').click();
|
||||
ndv.getters.backToCanvas().click();
|
||||
|
||||
workflowPage.actions.executeWorkflow();
|
||||
|
|
|
@ -110,7 +110,7 @@ describe('Node Creator', () => {
|
|||
it('should not show actions for single action nodes', () => {
|
||||
const singleActionNodes = [
|
||||
'DHL',
|
||||
'iCalendar',
|
||||
'Edit Fields',
|
||||
'LingvaNex',
|
||||
'Mailcheck',
|
||||
'MSG91',
|
||||
|
@ -484,8 +484,9 @@ describe('Node Creator', () => {
|
|||
nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'Wait');
|
||||
|
||||
nodeCreatorFeature.getters.searchBar().find('input').clear().type('spreadsheet');
|
||||
nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'Spreadsheet File');
|
||||
nodeCreatorFeature.getters.nodeItemName().eq(1).should('have.text', 'Google Sheets');
|
||||
nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'Convert to File');
|
||||
nodeCreatorFeature.getters.nodeItemName().eq(1).should('have.text', 'Extract From File');
|
||||
nodeCreatorFeature.getters.nodeItemName().eq(2).should('have.text', 'Google Sheets');
|
||||
|
||||
nodeCreatorFeature.getters.searchBar().find('input').clear().type('sheets');
|
||||
nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'Google Sheets');
|
||||
|
|
|
@ -987,16 +987,27 @@ export function assertBinaryData(
|
|||
): IBinaryData {
|
||||
const binaryKeyData = inputData.main[inputIndex]![itemIndex]!.binary;
|
||||
if (binaryKeyData === undefined) {
|
||||
throw new NodeOperationError(node, 'No binary data exists on item!', {
|
||||
itemIndex,
|
||||
});
|
||||
throw new NodeOperationError(
|
||||
node,
|
||||
`This operation expects the node's input data to contain a binary file '${propertyName}', but none was found [item ${itemIndex}]`,
|
||||
{
|
||||
itemIndex,
|
||||
description: 'Make sure that the previous node outputs a binary file',
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const binaryPropertyData = binaryKeyData[propertyName];
|
||||
if (binaryPropertyData === undefined) {
|
||||
throw new NodeOperationError(node, `Item has no binary property called "${propertyName}"`, {
|
||||
itemIndex,
|
||||
});
|
||||
throw new NodeOperationError(
|
||||
node,
|
||||
`The item has no binary field '${propertyName}' [item ${itemIndex}]`,
|
||||
{
|
||||
itemIndex,
|
||||
description:
|
||||
'Check that the parameter where you specified the input binary field name is correct, and that it matches a field in the binary input',
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
return binaryPropertyData;
|
||||
|
|
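A minimal usage sketch (hypothetical node code, not part of this diff) of how a node surfaces the reworded errors above, assuming the helper is exposed to nodes as this.helpers.assertBinaryData. The field name 'data' is only an example.

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

export async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const items = this.getInputData();
	const returnData: INodeExecutionData[] = [];

	for (let i = 0; i < items.length; i++) {
		// Throws "This operation expects the node's input data to contain a binary file 'data' ..."
		// when the item has no binary data at all, or no binary field named 'data'
		const binaryFile = this.helpers.assertBinaryData(i, 'data');
		returnData.push({
			json: { fileName: binaryFile.fileName, mimeType: binaryFile.mimeType },
			pairedItem: { item: i },
		});
	}

	return [returnData];
}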
|
@ -28,6 +28,8 @@ import {
|
|||
AI_CATEGORY_EMBEDDING,
|
||||
AI_OTHERS_NODE_CREATOR_VIEW,
|
||||
AI_UNCATEGORIZED_CATEGORY,
|
||||
CONVERT_TO_FILE_NODE_TYPE,
|
||||
EXTRACT_FROM_FILE_NODE_TYPE,
|
||||
SET_NODE_TYPE,
|
||||
CODE_NODE_TYPE,
|
||||
DATETIME_NODE_TYPE,
|
||||
|
@ -48,6 +50,8 @@ import {
|
|||
HELPERS_SUBCATEGORY,
|
||||
RSS_READ_NODE_TYPE,
|
||||
EMAIL_SEND_NODE_TYPE,
|
||||
EDIT_IMAGE_NODE_TYPE,
|
||||
COMPRESSION_NODE_TYPE,
|
||||
} from '@/constants';
|
||||
import { useI18n } from '@/composables/useI18n';
|
||||
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
|
||||
|
@ -394,7 +398,16 @@ export function RegularView(nodes: SimplifiedNodeType[]) {
|
|||
{
|
||||
key: 'convert',
|
||||
title: i18n.baseText('nodeCreator.sectionNames.transform.convert'),
|
||||
items: [HTML_NODE_TYPE, MARKDOWN_NODE_TYPE, XML_NODE_TYPE, CRYPTO_NODE_TYPE],
|
||||
items: [
|
||||
HTML_NODE_TYPE,
|
||||
MARKDOWN_NODE_TYPE,
|
||||
XML_NODE_TYPE,
|
||||
CRYPTO_NODE_TYPE,
|
||||
EXTRACT_FROM_FILE_NODE_TYPE,
|
||||
CONVERT_TO_FILE_NODE_TYPE,
|
||||
COMPRESSION_NODE_TYPE,
|
||||
EDIT_IMAGE_NODE_TYPE,
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -422,6 +435,13 @@ export function RegularView(nodes: SimplifiedNodeType[]) {
|
|||
properties: {
|
||||
title: FILES_SUBCATEGORY,
|
||||
icon: 'file-alt',
|
||||
sections: [
|
||||
{
|
||||
key: 'popular',
|
||||
title: i18n.baseText('nodeCreator.sectionNames.popular'),
|
||||
items: [CONVERT_TO_FILE_NODE_TYPE, EXTRACT_FROM_FILE_NODE_TYPE],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
|
@ -161,6 +161,8 @@ export const XERO_NODE_TYPE = 'n8n-nodes-base.xero';
|
|||
export const ZENDESK_NODE_TYPE = 'n8n-nodes-base.zendesk';
|
||||
export const ZENDESK_TRIGGER_NODE_TYPE = 'n8n-nodes-base.zendeskTrigger';
|
||||
export const DISCORD_NODE_TYPE = 'n8n-nodes-base.discord';
|
||||
export const EXTRACT_FROM_FILE_NODE_TYPE = 'n8n-nodes-base.extractFromFile';
|
||||
export const CONVERT_TO_FILE_NODE_TYPE = 'n8n-nodes-base.convertToFile';
|
||||
export const DATETIME_NODE_TYPE = 'n8n-nodes-base.dateTime';
|
||||
export const REMOVE_DUPLICATES_NODE_TYPE = 'n8n-nodes-base.removeDuplicates';
|
||||
export const SPLIT_OUT_NODE_TYPE = 'n8n-nodes-base.splitOut';
|
||||
|
@ -172,6 +174,8 @@ export const MARKDOWN_NODE_TYPE = 'n8n-nodes-base.markdown';
|
|||
export const XML_NODE_TYPE = 'n8n-nodes-base.xml';
|
||||
export const CRYPTO_NODE_TYPE = 'n8n-nodes-base.crypto';
|
||||
export const RSS_READ_NODE_TYPE = 'n8n-nodes-base.rssFeedRead';
|
||||
export const COMPRESSION_NODE_TYPE = 'n8n-nodes-base.compression';
|
||||
export const EDIT_IMAGE_NODE_TYPE = 'n8n-nodes-base.editImage';
|
||||
|
||||
export const CREDENTIAL_ONLY_NODE_PREFIX = 'n8n-creds-base';
|
||||
export const CREDENTIAL_ONLY_HTTP_NODE_VERSION = 4.1;
|
||||
|
|
|
@ -181,12 +181,12 @@ export class ApiTemplateIo implements INodeType {
|
|||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['pdf', 'image'],
|
||||
|
|
|
@ -90,7 +90,7 @@ export class AwsRekognition implements INodeType {
|
|||
default: 'detectFaces',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -104,7 +104,7 @@ export class AwsRekognition implements INodeType {
|
|||
description: 'Whether the image to analyze should be taken from binary field',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['analyze'],
|
||||
|
@ -115,7 +115,7 @@ export class AwsRekognition implements INodeType {
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
|
|
|
@ -373,7 +373,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'If not set the binary data filename will be used',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
|
@ -401,7 +401,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -414,7 +414,7 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
@ -698,7 +698,7 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -709,7 +709,7 @@ export const fileFields: INodeProperties[] = [
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* file:delete */
|
||||
|
|
|
@ -373,7 +373,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'If not set the binary data filename will be used',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
|
@ -401,7 +401,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -414,7 +414,7 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
@ -698,7 +698,7 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -709,7 +709,7 @@ export const fileFields: INodeProperties[] = [
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* file:delete */
|
||||
|
|
|
@ -161,7 +161,7 @@ export const fileFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -172,7 +172,7 @@ export const fileFields: INodeProperties[] = [
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
@ -671,7 +671,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The name the file should be saved as',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -700,7 +700,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The text content of the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -712,7 +712,7 @@ export const fileFields: INodeProperties[] = [
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property which contains the data for the file',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Parent ID',
|
||||
|
|
|
@ -327,7 +327,7 @@ export const messageFields: INodeProperties[] = [
|
|||
value: 'url',
|
||||
},
|
||||
{
|
||||
name: 'Binary Data',
|
||||
name: 'Binary File',
|
||||
value: 'binaryData',
|
||||
},
|
||||
],
|
||||
|
|
|
@ -11,8 +11,19 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"alias": ["Zip", "Gzip", "uncompress"],
|
||||
"alias": [
|
||||
"Zip",
|
||||
"Gzip",
|
||||
"uncompress",
|
||||
"compress",
|
||||
"decompress",
|
||||
"archive",
|
||||
"unarchive",
|
||||
"Binary",
|
||||
"Files",
|
||||
"File"
|
||||
],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files"]
|
||||
"Core Nodes": ["Files", "Data Transformation"]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ export class Compression implements INodeType {
|
|||
icon: 'fa:file-archive',
|
||||
group: ['transform'],
|
||||
subtitle: '={{$parameter["operation"]}}',
|
||||
version: 1,
|
||||
version: [1, 1.1],
|
||||
description: 'Compress and decompress files',
|
||||
defaults: {
|
||||
name: 'Compression',
|
||||
|
@ -68,28 +68,49 @@ export class Compression implements INodeType {
|
|||
{
|
||||
name: 'Compress',
|
||||
value: 'compress',
|
||||
action: 'Compress file(s)',
|
||||
description: 'Compress files into a zip or gzip archive',
|
||||
},
|
||||
{
|
||||
name: 'Decompress',
|
||||
value: 'decompress',
|
||||
action: 'Decompress file(s)',
|
||||
description: 'Decompress zip or gzip archives',
|
||||
},
|
||||
],
|
||||
default: 'decompress',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field(s)',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['compress', 'decompress'],
|
||||
operation: ['compress'],
|
||||
},
|
||||
},
|
||||
placeholder: '',
|
||||
placeholder: 'e.g. data,data2,data3',
|
||||
hint: 'The name of the input binary field(s) containing the file(s) to be compressed',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file(s) to be compress/decompress. Multiple can be used separated by a comma (,).',
|
||||
'To process more than one file, use a comma-separated list of the binary fields names',
|
||||
},
|
||||
{
|
||||
displayName: 'Input Binary Field(s)',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['decompress'],
|
||||
},
|
||||
},
|
||||
placeholder: 'e.g. data',
|
||||
hint: 'The name of the input binary field(s) containing the file(s) to decompress',
|
||||
description:
|
||||
'To process more than one file, use a comma-separated list of the binary fields names',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Format',
|
||||
|
@ -109,16 +130,42 @@ export class Compression implements INodeType {
|
|||
displayOptions: {
|
||||
show: {
|
||||
operation: ['compress'],
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
description: 'Format of the output file',
|
||||
description: 'Format of the output',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Format',
|
||||
name: 'outputFormat',
|
||||
type: 'options',
|
||||
default: 'zip',
|
||||
options: [
|
||||
{
|
||||
name: 'Gzip',
|
||||
value: 'gzip',
|
||||
},
|
||||
{
|
||||
name: 'Zip',
|
||||
value: 'zip',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['compress'],
|
||||
},
|
||||
hide: {
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
description: 'Format of the output',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'data.zip',
|
||||
placeholder: 'e.g. data.zip',
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
|
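The two 'Output Format' definitions above gate the same parameter by node version. A condensed sketch of the pattern (property shapes assumed, defaults illustrative only): the copy with show: { '@version': [1] } renders only for nodes at typeVersion 1, while the copy with hide: { '@version': [1] } renders for version 1.1 and later.

import type { INodeProperties } from 'n8n-workflow';

// Rendered only by nodes at typeVersion 1 (legacy behaviour preserved)
const legacyOutputFormat: INodeProperties = {
	displayName: 'Output Format',
	name: 'outputFormat',
	type: 'options',
	options: [
		{ name: 'Gzip', value: 'gzip' },
		{ name: 'Zip', value: 'zip' },
	],
	default: 'gzip', // illustrative; the real v1 default is not visible in this hunk
	displayOptions: { show: { operation: ['compress'], '@version': [1] } },
};

// Rendered by typeVersion 1.1 and later
const currentOutputFormat: INodeProperties = {
	...legacyOutputFormat,
	default: 'zip',
	displayOptions: { show: { operation: ['compress'] }, hide: { '@version': [1] } },
};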
@ -126,10 +173,10 @@ export class Compression implements INodeType {
|
|||
outputFormat: ['zip'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the file to be compressed',
|
||||
description: 'Name of the output file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property Output',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyOutput',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -139,12 +186,43 @@ export class Compression implements INodeType {
|
|||
operation: ['compress'],
|
||||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property to which to write the data of the compressed files',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Prefix',
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. data.txt',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['compress'],
|
||||
outputFormat: ['gzip'],
|
||||
},
|
||||
hide: {
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
description: 'Name of the output file',
|
||||
},
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyOutput',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
displayOptions: {
|
||||
show: {
|
||||
outputFormat: ['gzip'],
|
||||
operation: ['compress'],
|
||||
},
|
||||
hide: {
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Output File Prefix',
|
||||
name: 'outputPrefix',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -153,9 +231,10 @@ export class Compression implements INodeType {
|
|||
show: {
|
||||
operation: ['compress'],
|
||||
outputFormat: ['gzip'],
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
description: 'Prefix use for all gzip compressed files',
|
||||
description: 'Prefix to add to the gzip file',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Prefix',
|
||||
|
@ -168,7 +247,7 @@ export class Compression implements INodeType {
|
|||
operation: ['decompress'],
|
||||
},
|
||||
},
|
||||
description: 'Prefix use for all decompressed files',
|
||||
description: 'Prefix to add to the decompressed files',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
@ -178,6 +257,7 @@ export class Compression implements INodeType {
|
|||
const length = items.length;
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
const operation = this.getNodeParameter('operation', 0);
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
|
||||
for (let i = 0; i < length; i++) {
|
||||
try {
|
||||
|
@ -212,20 +292,37 @@ export class Compression implements INodeType {
|
|||
|
||||
binaryObject[`${outputPrefix}${zipIndex++}`] = data;
|
||||
}
|
||||
} else if (binaryData.fileExtension?.toLowerCase() === 'gz') {
|
||||
} else if (['gz', 'gzip'].includes(binaryData.fileExtension?.toLowerCase() as string)) {
|
||||
const file = await gunzip(binaryDataBuffer);
|
||||
|
||||
const fileName = binaryData.fileName?.split('.')[0];
|
||||
let fileExtension;
|
||||
let mimeType;
|
||||
|
||||
if (binaryData.fileName?.endsWith('.gz')) {
|
||||
const extractedFileExtension = binaryData.fileName.replace('.gz', '').split('.');
|
||||
if (extractedFileExtension.length > 1) {
|
||||
fileExtension = extractedFileExtension[extractedFileExtension.length - 1];
|
||||
mimeType = mime.lookup(fileExtension) as string;
|
||||
}
|
||||
}
|
||||
|
||||
const propertyName = `${outputPrefix}${index}`;
|
||||
|
||||
binaryObject[propertyName] = await this.helpers.prepareBinaryData(
|
||||
Buffer.from(file.buffer),
|
||||
fileName,
|
||||
mimeType,
|
||||
);
|
||||
const fileExtension = mime.extension(binaryObject[propertyName].mimeType) as string;
|
||||
|
||||
if (!fileExtension) {
|
||||
mimeType = binaryObject[propertyName].mimeType;
|
||||
fileExtension = mime.extension(mimeType) as string;
|
||||
}
|
||||
|
||||
binaryObject[propertyName].fileName = `${fileName}.${fileExtension}`;
|
||||
binaryObject[propertyName].fileExtension = fileExtension;
|
||||
binaryObject[propertyName].mimeType = mimeType as string;
|
||||
}
|
||||
}
|
||||
|
||||
|
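For the decompress branch above, a worked example of how the output metadata is derived for a gunzipped file (illustrative values; assumes the mime-types package, which the node imports as mime):

import mime from 'mime-types';

const originalName = 'archive.json.gz';

const baseName = originalName.split('.')[0];                                   // 'archive'
const innerExtension = originalName.replace('.gz', '').split('.').pop() ?? ''; // 'json'
const mimeType = mime.lookup(innerExtension) || 'application/octet-stream';    // 'application/json'

// The decompressed binary entry ends up as:
//   fileName: 'archive.json', fileExtension: 'json', mimeType: 'application/json'
console.log(`${baseName}.${innerExtension}`, mimeType);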
@ -239,14 +336,21 @@ export class Compression implements INodeType {
|
|||
}
|
||||
|
||||
if (operation === 'compress') {
|
||||
const binaryPropertyNames = this.getNodeParameter('binaryPropertyName', 0)
|
||||
let binaryPropertyNameIndex = 0;
|
||||
if (nodeVersion > 1) {
|
||||
binaryPropertyNameIndex = i;
|
||||
}
|
||||
|
||||
const binaryPropertyNames = this.getNodeParameter(
|
||||
'binaryPropertyName',
|
||||
binaryPropertyNameIndex,
|
||||
)
|
||||
.split(',')
|
||||
.map((key) => key.trim());
|
||||
|
||||
const outputFormat = this.getNodeParameter('outputFormat', 0) as string;
|
||||
|
||||
const zipData: fflate.Zippable = {};
|
||||
|
||||
const binaryObject: IBinaryKeyData = {};
|
||||
|
||||
for (const [index, binaryPropertyName] of binaryPropertyNames.entries()) {
|
||||
|
@ -261,26 +365,53 @@ export class Compression implements INodeType {
|
|||
},
|
||||
];
|
||||
} else if (outputFormat === 'gzip') {
|
||||
const outputPrefix = this.getNodeParameter('outputPrefix', 0) as string;
|
||||
let outputPrefix;
|
||||
let fileName;
|
||||
let binaryProperty;
|
||||
let filePath;
|
||||
|
||||
if (nodeVersion > 1) {
|
||||
outputPrefix = this.getNodeParameter('binaryPropertyOutput', i, 'data');
|
||||
binaryProperty = `${outputPrefix}${index ? index : ''}`;
|
||||
|
||||
fileName = this.getNodeParameter('fileName', i, '') as string;
|
||||
if (!fileName) {
|
||||
fileName = binaryData.fileName?.split('.')[0];
|
||||
} else {
|
||||
fileName = fileName.replace('.gz', '').replace('.gzip', '');
|
||||
}
|
||||
|
||||
const fileExtension = binaryData.fileExtension
|
||||
? `.${binaryData.fileExtension.toLowerCase()}`
|
||||
: '';
|
||||
filePath = `${fileName}${fileExtension}.gz`;
|
||||
} else {
|
||||
outputPrefix = this.getNodeParameter('outputPrefix', 0) as string;
|
||||
binaryProperty = `${outputPrefix}${index}`;
|
||||
fileName = binaryData.fileName?.split('.')[0];
|
||||
filePath = `${fileName}.gzip`;
|
||||
}
|
||||
|
||||
const data = await gzip(binaryDataBuffer);
|
||||
|
||||
const fileName = binaryData.fileName?.split('.')[0];
|
||||
|
||||
binaryObject[`${outputPrefix}${index}`] = await this.helpers.prepareBinaryData(
|
||||
binaryObject[binaryProperty] = await this.helpers.prepareBinaryData(
|
||||
Buffer.from(data),
|
||||
`${fileName}.gzip`,
|
||||
filePath,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (outputFormat === 'zip') {
|
||||
const fileName = this.getNodeParameter('fileName', 0) as string;
|
||||
|
||||
const binaryPropertyOutput = this.getNodeParameter('binaryPropertyOutput', 0);
|
||||
|
||||
let zipOptionsIndex = 0;
|
||||
if (nodeVersion > 1) {
|
||||
zipOptionsIndex = i;
|
||||
}
|
||||
const fileName = this.getNodeParameter('fileName', zipOptionsIndex) as string;
|
||||
const binaryPropertyOutput = this.getNodeParameter(
|
||||
'binaryPropertyOutput',
|
||||
zipOptionsIndex,
|
||||
);
|
||||
const buffer = await zip(zipData);
|
||||
|
||||
const data = await this.helpers.prepareBinaryData(Buffer.from(buffer), fileName);
|
||||
|
||||
returnData.push({
|
||||
|
|
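A worked example of the version branch above, for an input binary file named 'report.pdf' with no explicit File Name set (values derived from the code, not from a test run):

// Version 1:   output prefix comes from 'Output Prefix'; the archive is named
//              `${fileName}.gzip`, i.e. 'report.gzip' (the original extension is dropped).
// Version 1.1: output field comes from 'Put Output File in Field'; the archive is named
//              `${fileName}${fileExtension}.gz`, i.e. 'report.pdf.gz'.
const binaryFileName = 'report.pdf';

const base = binaryFileName.split('.')[0];     // 'report'
const legacyArchiveName = `${base}.gzip`;      // v1
const currentArchiveName = `${base}.pdf.gz`;   // v1.1 keeps the inner extension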
|
@ -89,7 +89,7 @@ export const analyzerFields: INodeProperties[] = [
|
|||
description: 'Enter the observable value',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -101,7 +101,7 @@ export const analyzerFields: INodeProperties[] = [
|
|||
operation: ['execute'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'TLP',
|
||||
|
|
|
@ -233,7 +233,7 @@ export const responderFields: INodeProperties[] = [
|
|||
name: 'artifactValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Binary Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
|
@ -490,7 +490,7 @@ export const responderFields: INodeProperties[] = [
|
|||
name: 'values',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -499,7 +499,7 @@ export const responderFields: INodeProperties[] = [
|
|||
dataType: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property which contains the attachment data',
|
||||
hint: 'The name of the input binary field containing the attachment data',
|
||||
},
|
||||
{
|
||||
displayName: 'Data',
|
||||
|
|
|
@ -121,7 +121,7 @@ export class Crypto implements INodeType {
|
|||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
|
|
@ -312,7 +312,7 @@ export class Dropbox implements INodeType {
|
|||
description: 'The file path of the file to download. Has to contain the full path.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -323,7 +323,7 @@ export class Dropbox implements INodeType {
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -346,7 +346,7 @@ export class Dropbox implements INodeType {
|
|||
'The file path of the file to upload. Has to contain the full path. The parent folder has to exist. Existing files get overwritten.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -374,7 +374,7 @@ export class Dropbox implements INodeType {
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -387,8 +387,7 @@ export class Dropbox implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
|
|
@ -11,7 +11,8 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"aliases": ["File", "Binary"],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files"]
|
||||
"Core Nodes": ["Files", "Data Transformation"]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -184,7 +184,7 @@ export class FacebookGraphApi implements INodeType {
|
|||
description: 'Whether to connect even if SSL certificate validation is not possible',
|
||||
},
|
||||
{
|
||||
displayName: 'Send Binary Data',
|
||||
displayName: 'Send Binary File',
|
||||
name: 'sendBinaryData',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
|
@ -197,7 +197,7 @@ export class FacebookGraphApi implements INodeType {
|
|||
description: 'Whether binary data should be sent as body',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
|
@ -210,8 +210,9 @@ export class FacebookGraphApi implements INodeType {
|
|||
httpRequestMethod: ['POST', 'PUT'],
|
||||
},
|
||||
},
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded. For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2</code>',
|
||||
'For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2</code>',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.convertToFile",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": ["Core Nodes"],
|
||||
"resources": {
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.converttofile/"
|
||||
}
|
||||
]
|
||||
},
|
||||
"alias": [
|
||||
"CSV",
|
||||
"Spreadsheet",
|
||||
"Excel",
|
||||
"xls",
|
||||
"xlsx",
|
||||
"ods",
|
||||
"tabular",
|
||||
"encode",
|
||||
"encoding",
|
||||
"Move Binary Data",
|
||||
"Binary",
|
||||
"File",
|
||||
"JSON",
|
||||
"HTML",
|
||||
"ICS",
|
||||
"RTF",
|
||||
"64"
|
||||
],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files", "Data Transformation"]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,121 @@
|
|||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as spreadsheet from './actions/spreadsheet.operation';
|
||||
import * as toBinary from './actions/toBinary.operation';
|
||||
import * as toJson from './actions/toJson.operation';
|
||||
import * as iCall from './actions/iCall.operation';
|
||||
|
||||
export class ConvertToFile implements INodeType {
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-missing-subtitle
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Convert to File',
|
||||
name: 'convertToFile',
|
||||
icon: 'file:convertToFile.svg',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
description: 'Convert JSON data to binary data',
|
||||
defaults: {
|
||||
name: 'Convert to File',
|
||||
},
|
||||
inputs: ['main'],
|
||||
outputs: ['main'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Convert to CSV',
|
||||
value: 'csv',
|
||||
action: 'Convert to CSV',
|
||||
description: 'Transform input data into a CSV file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to HTML',
|
||||
value: 'html',
|
||||
action: 'Convert to HTML',
|
||||
description: 'Transform input data into a table in an HTML file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to iCal',
|
||||
value: 'iCal',
|
||||
action: 'Convert to iCal',
|
||||
description: 'Converts each input item to an ICS event file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to JSON',
|
||||
value: 'toJson',
|
||||
action: 'Convert to JSON',
|
||||
description: 'Transform input data into a single or multiple JSON files',
|
||||
},
|
||||
{
|
||||
name: 'Convert to ODS',
|
||||
value: 'ods',
|
||||
action: 'Convert to ODS',
|
||||
description: 'Transform input data into an ODS file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to RTF',
|
||||
value: 'rtf',
|
||||
action: 'Convert to RTF',
|
||||
description: 'Transform input data into a table in an RTF file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to XLS',
|
||||
value: 'xls',
|
||||
action: 'Convert to XLS',
|
||||
description: 'Transform input data into an Excel file',
|
||||
},
|
||||
{
|
||||
name: 'Convert to XLSX',
|
||||
value: 'xlsx',
|
||||
action: 'Convert to XLSX',
|
||||
description: 'Transform input data into an Excel file',
|
||||
},
|
||||
{
|
||||
name: 'Move Base64 String to File',
|
||||
value: 'toBinary',
|
||||
action: 'Move base64 string to file',
|
||||
description: 'Convert a base64-encoded string into its original file format',
|
||||
},
|
||||
],
|
||||
default: 'csv',
|
||||
},
|
||||
...spreadsheet.description,
|
||||
...toBinary.description,
|
||||
...toJson.description,
|
||||
...iCall.description,
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions) {
|
||||
const items = this.getInputData();
|
||||
const operation = this.getNodeParameter('operation', 0);
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
if (spreadsheet.operations.includes(operation)) {
|
||||
returnData = await spreadsheet.execute.call(this, items, operation);
|
||||
}
|
||||
|
||||
if (operation === 'toJson') {
|
||||
returnData = await toJson.execute.call(this, items);
|
||||
}
|
||||
|
||||
if (operation === 'toBinary') {
|
||||
returnData = await toBinary.execute.call(this, items);
|
||||
}
|
||||
|
||||
if (operation === 'iCal') {
|
||||
returnData = await iCall.execute.call(this, items);
|
||||
}
|
||||
|
||||
return [returnData];
|
||||
}
|
||||
}
|
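An illustration (assumed file name and MIME type, not taken from this diff) of what the spreadsheet branch returns for the default 'csv' operation: all input items are merged into one output item whose binary field holds the generated file.

const inputItems = [
	{ json: { name: 'Alice', age: 30 } },
	{ json: { name: 'Bob', age: 25 } },
];

const outputItem = {
	json: {},
	binary: {
		data: {
			// n8n stores binary payloads as base64 strings
			data: Buffer.from('name,age\nAlice,30\nBob,25\n').toString('base64'),
			mimeType: 'text/csv', // assumed
			fileName: 'file.csv', // assumed; configurable via the File Name option
		},
	},
	pairedItem: [{ item: 0 }, { item: 1 }],
};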
|
@ -0,0 +1,20 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import * as createEvent from '../../../ICalendar/createEvent.operation';
|
||||
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
|
||||
export const description: INodeProperties[] = updateDisplayOptions(
|
||||
{
|
||||
show: {
|
||||
operation: ['iCal'],
|
||||
},
|
||||
},
|
||||
createEvent.description,
|
||||
);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData = await createEvent.execute.call(this, items);
|
||||
|
||||
return returnData;
|
||||
}
|
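The file above reuses the ICalendar node's 'Create Event' description under a new operation value. A simplified sketch of what the updateDisplayOptions helper is used for here (the merge logic below is an assumption, not the helper's actual source):

import type { INodeProperties } from 'n8n-workflow';

// Attach the same displayOptions to every property so the shared description only
// renders when the 'iCal' operation is selected.
function withShowCondition(
	displayOptions: INodeProperties['displayOptions'],
	properties: INodeProperties[],
): INodeProperties[] {
	return properties.map((property) => ({ ...property, displayOptions }));
}

const sharedProperties: INodeProperties[] = [
	{ displayName: 'Event Title', name: 'title', type: 'string', default: '' },
];

const scoped = withShowCondition({ show: { operation: ['iCal'] } }, sharedProperties);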
|
@ -0,0 +1,126 @@
|
|||
import {
|
||||
NodeOperationError,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { generatePairedItemData, updateDisplayOptions } from '@utils/utilities';
|
||||
import type { JsonToSpreadsheetBinaryOptions, JsonToSpreadsheetBinaryFormat } from '@utils/binary';
|
||||
|
||||
import { convertJsonToSpreadsheetBinary } from '@utils/binary';
|
||||
|
||||
export const operations = ['csv', 'html', 'rtf', 'ods', 'xls', 'xlsx'];
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Compression',
|
||||
name: 'compression',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': ['xlsx', 'ods'],
|
||||
},
|
||||
},
|
||||
default: false,
|
||||
description: 'Whether to reduce the output file size',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'Name of the output file',
|
||||
},
|
||||
{
|
||||
displayName: 'Header Row',
|
||||
name: 'headerRow',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description: 'Whether the first row of the file contains the header names',
|
||||
},
|
||||
{
|
||||
displayName: 'Sheet Name',
|
||||
name: 'sheetName',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': ['ods', 'xls', 'xlsx'],
|
||||
},
|
||||
},
|
||||
default: 'Sheet',
|
||||
description: 'Name of the sheet to create in the spreadsheet',
|
||||
placeholder: 'e.g. mySheet',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: operations,
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(
|
||||
this: IExecuteFunctions,
|
||||
items: INodeExecutionData[],
|
||||
operation: string,
|
||||
) {
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
const pairedItem = generatePairedItemData(items.length);
|
||||
try {
|
||||
const options = this.getNodeParameter('options', 0, {}) as JsonToSpreadsheetBinaryOptions;
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0, 'data');
|
||||
|
||||
const binaryData = await convertJsonToSpreadsheetBinary.call(
|
||||
this,
|
||||
items,
|
||||
operation as JsonToSpreadsheetBinaryFormat,
|
||||
options,
|
||||
'File',
|
||||
);
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem,
|
||||
};
|
||||
|
||||
returnData = [newItem];
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem,
|
||||
});
|
||||
} else {
|
||||
throw new NodeOperationError(this.getNode(), error);
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
|
@ -0,0 +1,147 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
|
||||
import type { JsonToBinaryOptions } from '@utils/binary';
|
||||
import { createBinaryFromJson } from '@utils/binary';
|
||||
import { encodeDecodeOptions } from '@utils/descriptions';
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Base64 Input Field',
|
||||
name: 'sourceProperty',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
requiresDataPath: 'single',
|
||||
description:
|
||||
"The name of the input field that contains the base64 string to convert to a file. Use dot-notation for deep fields (e.g. 'level1.level2.currentKey').",
|
||||
},
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Add Byte Order Mark (BOM)',
|
||||
description:
|
||||
'Whether to add special marker at the start of your text file. This marker helps some programs understand how to read the file correctly.',
|
||||
name: 'addBOM',
|
||||
displayOptions: {
|
||||
show: {
|
||||
encoding: ['utf8', 'cesu8', 'ucs2'],
|
||||
},
|
||||
},
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
displayName: 'Data Is Base64',
|
||||
name: 'dataIsBase64',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description: 'Whether the data is already base64 encoded',
|
||||
},
|
||||
{
|
||||
displayName: 'Encoding',
|
||||
name: 'encoding',
|
||||
type: 'options',
|
||||
options: encodeDecodeOptions,
|
||||
default: 'utf8',
|
||||
description: 'Choose the character set to use to encode the data',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
dataIsBase64: [true],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. myFile',
|
||||
description: 'Name of the output file',
|
||||
},
|
||||
{
|
||||
displayName: 'MIME Type',
|
||||
name: 'mimeType',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g text/plain',
|
||||
description:
|
||||
'The MIME type of the output file. <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types" target="_blank">Common MIME types</a>.',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['toBinary'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
try {
|
||||
const options = this.getNodeParameter('options', i, {});
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
|
||||
const sourceProperty = this.getNodeParameter('sourceProperty', i) as string;
|
||||
|
||||
const jsonToBinaryOptions: JsonToBinaryOptions = {
|
||||
sourceKey: sourceProperty,
|
||||
fileName: options.fileName as string,
|
||||
mimeType: options.mimeType as string,
|
||||
dataIsBase64: options.dataIsBase64 !== false,
|
||||
encoding: options.encoding as string,
|
||||
addBOM: options.addBOM as boolean,
|
||||
itemIndex: i,
|
||||
};
|
||||
|
||||
const binaryData = await createBinaryFromJson.call(this, items[i].json, jsonToBinaryOptions);
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem: { item: i },
|
||||
};
|
||||
|
||||
returnData.push(newItem);
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: i,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error, { itemIndex: i });
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
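A conceptual illustration (not the @utils/binary implementation) of what the 'Move Base64 String to File' branch above does to one item, using field names that follow the placeholders shown:

const item = { json: { data: Buffer.from('hello world').toString('base64') } };

const sourceProperty = 'data';     // 'Base64 Input Field'
const binaryPropertyName = 'data'; // 'Put Output File in Field'

const fileBuffer = Buffer.from(item.json[sourceProperty], 'base64');

const outputItem = {
	json: {},
	binary: {
		[binaryPropertyName]: {
			data: fileBuffer.toString('base64'), // n8n keeps binary payloads base64-encoded
			mimeType: 'text/plain',              // assumed; normally detected or set via the MIME Type option
			fileName: 'myFile.txt',              // assumed; set via the File Name option
		},
	},
	pairedItem: { item: 0 },
};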
|
@ -0,0 +1,165 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
|
||||
import { generatePairedItemData, updateDisplayOptions } from '@utils/utilities';
|
||||
import { createBinaryFromJson } from '@utils/binary';
|
||||
import { encodeDecodeOptions } from '@utils/descriptions';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Mode',
|
||||
name: 'mode',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'All Items to One File',
|
||||
value: 'once',
|
||||
},
|
||||
{
|
||||
name: 'Each Item to Separate File',
|
||||
value: 'each',
|
||||
},
|
||||
],
|
||||
default: 'once',
|
||||
},
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Add Byte Order Mark (BOM)',
|
||||
name: 'addBOM',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description:
|
||||
'Whether to add special marker at the start of your text file. This marker helps some programs understand how to read the file correctly.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
encoding: ['utf8', 'cesu8', 'ucs2'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Encoding',
|
||||
name: 'encoding',
|
||||
type: 'options',
|
||||
options: encodeDecodeOptions,
|
||||
default: 'utf8',
|
||||
description: 'Choose the character set to use to encode the data',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. myFile.json',
|
||||
description: 'Name of the output file',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['toJson'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
const mode = this.getNodeParameter('mode', 0, 'once') as string;
|
||||
if (mode === 'once') {
|
||||
const pairedItem = generatePairedItemData(items.length);
|
||||
try {
|
||||
const options = this.getNodeParameter('options', 0, {});
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0, 'data');
|
||||
|
||||
const binaryData = await createBinaryFromJson.call(
|
||||
this,
|
||||
items.map((item) => item.json),
|
||||
{
|
||||
fileName: options.fileName as string,
|
||||
mimeType: 'application/json',
|
||||
encoding: options.encoding as string,
|
||||
addBOM: options.addBOM as boolean,
|
||||
},
|
||||
);
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem,
|
||||
};
|
||||
|
||||
returnData = [newItem];
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem,
|
||||
});
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error);
|
||||
}
|
||||
} else {
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
try {
|
||||
const options = this.getNodeParameter('options', i, {});
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
|
||||
|
||||
const binaryData = await createBinaryFromJson.call(this, items[i].json, {
|
||||
fileName: options.fileName as string,
|
||||
encoding: options.encoding as string,
|
||||
addBOM: options.addBOM as boolean,
|
||||
mimeType: 'application/json',
|
||||
itemIndex: i,
|
||||
});
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem: { item: i },
|
||||
};
|
||||
|
||||
returnData.push(newItem);
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: i,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error, { itemIndex: i });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
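The 'once' branch above pairs its single output file with every input item. A sketch of the assumed behaviour of generatePairedItemData (shape inferred from its usage here, not the helper's source):

function generatePairedItemDataSketch(length: number): Array<{ item: number }> {
	return Array.from({ length }, (_, item) => ({ item }));
}

// 'All Items to One File'      -> one output item, pairedItem: [{ item: 0 }, { item: 1 }, ...]
// 'Each Item to Separate File' -> one output item per input, pairedItem: { item: i }
console.log(generatePairedItemDataSketch(3)); // [ { item: 0 }, { item: 1 }, { item: 2 } ]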
|
@ -0,0 +1,12 @@
|
|||
<svg width="512" height="512" viewBox="0 0 512 512" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1147_463)">
|
||||
<path d="M170 39.8081C170 33.2867 175.287 28 181.808 28H338.41V189.59C338.41 196.218 343.782 201.59 350.41 201.59H512L512 472.893C512 479.415 506.713 484.701 500.192 484.701H181.808C177.972 484.701 174.564 482.873 172.407 480.039C175.619 477.958 178.665 475.515 181.488 472.708L271.488 383.208C282.057 372.697 288 358.406 288 343.5C288 328.594 282.057 314.303 271.488 303.792L181.488 214.292C177.969 210.793 174.103 207.858 170 205.487V39.8081Z" fill="#2244FF"/>
|
||||
<path d="M369.898 34C369.898 30.6863 372.584 28 375.898 28H378.564C381.7 28 384.708 29.2479 386.923 31.4684L508.551 153.386C510.76 155.6 512 158.599 512 161.726L512 164.102C512 167.416 509.314 170.102 506 170.102H375.898C372.584 170.102 369.898 167.416 369.898 164.102V34Z" fill="#2244FF"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M125.077 415.982C115.678 425.329 115.636 440.525 124.982 449.923C134.329 459.322 149.525 459.364 158.923 450.018L248.923 360.518C253.453 356.013 256 349.888 256 343.5C256 337.112 253.453 330.987 248.923 326.482L158.923 236.982C149.525 227.636 134.329 227.678 124.982 237.077C115.636 246.475 115.678 261.671 125.077 271.018L173.327 319L12 319C5.37257 319 -4.12516e-06 324.373 -4.41485e-06 331L-5.46392e-06 355C-5.75362e-06 361.627 5.37258 367 12 367L174.333 367L125.077 415.982Z" fill="#2244FF"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1147_463">
|
||||
<rect width="512" height="512" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.extractFromFile",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": ["Core Nodes"],
|
||||
"resources": {
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.extractfromfile/"
|
||||
}
|
||||
]
|
||||
},
|
||||
"alias": [
|
||||
"CSV",
|
||||
"Spreadsheet",
|
||||
"Excel",
|
||||
"xls",
|
||||
"xlsx",
|
||||
"ods",
|
||||
"tabular",
|
||||
"decode",
|
||||
"decoding",
|
||||
"Move Binary Data",
|
||||
"Binary",
|
||||
"File",
|
||||
"PDF",
|
||||
"JSON",
|
||||
"HTML",
|
||||
"ICS",
|
||||
"txt",
|
||||
"Text",
|
||||
"RTF",
|
||||
"XML",
|
||||
"64"
|
||||
],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files", "Data Transformation"]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,134 @@
|
|||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as spreadsheet from './actions/spreadsheet.operation';
|
||||
import * as moveTo from './actions/moveTo.operation';
|
||||
import * as pdf from './actions/pdf.operation';
|
||||
|
||||
export class ExtractFromFile implements INodeType {
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-missing-subtitle
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Extract From File',
|
||||
name: 'extractFromFile',
|
||||
icon: 'file:extractFromFile.svg',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
description: 'Convert binary data to JSON',
|
||||
defaults: {
|
||||
name: 'Extract From File',
|
||||
},
|
||||
inputs: ['main'],
|
||||
outputs: ['main'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
|
||||
options: [
|
||||
{
|
||||
name: 'Extract From CSV',
|
||||
value: 'csv',
|
||||
action: 'Extract from CSV',
|
||||
description: 'Transform a CSV file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From HTML',
|
||||
value: 'html',
|
||||
action: 'Extract from HTML',
|
||||
description: 'Transform a table in an HTML file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From JSON',
|
||||
value: 'fromJson',
|
||||
action: 'Extract from JSON',
|
||||
description: 'Transform a JSON file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From ICS',
|
||||
value: 'fromIcs',
|
||||
action: 'Extract from ICS',
|
||||
description: 'Transform an ICS file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From ODS',
|
||||
value: 'ods',
|
||||
action: 'Extract from ODS',
|
||||
description: 'Transform an ODS file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From PDF',
|
||||
value: 'pdf',
|
||||
action: 'Extract from PDF',
|
||||
description: 'Extracts the content and metadata from a PDF file',
|
||||
},
|
||||
{
|
||||
name: 'Extract From RTF',
|
||||
value: 'rtf',
|
||||
action: 'Extract from RTF',
|
||||
description: 'Transform a table in an RTF file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From Text File',
|
||||
value: 'text',
|
||||
action: 'Extract from text file',
|
||||
description: 'Extracts the content of a text file',
|
||||
},
|
||||
{
|
||||
name: 'Extract From XML',
|
||||
value: 'xml',
|
||||
action: 'Extract from XML',
|
||||
description: 'Extracts the content of an XML file',
|
||||
},
|
||||
{
|
||||
name: 'Extract From XLS',
|
||||
value: 'xls',
|
||||
action: 'Extract from XLS',
|
||||
description: 'Transform an Excel file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Extract From XLSX',
|
||||
value: 'xlsx',
|
||||
action: 'Extract from XLSX',
|
||||
description: 'Transform an Excel file into output items',
|
||||
},
|
||||
{
|
||||
name: 'Move File to Base64 String',
|
||||
value: 'binaryToPropery',
|
||||
action: 'Move file to base64 string',
|
||||
description: 'Convert a file into a base64-encoded string',
|
||||
},
|
||||
],
|
||||
default: 'csv',
|
||||
},
|
||||
...spreadsheet.description,
|
||||
...moveTo.description,
|
||||
...pdf.description,
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions) {
|
||||
const items = this.getInputData();
|
||||
const operation = this.getNodeParameter('operation', 0);
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
if (spreadsheet.operations.includes(operation)) {
|
||||
returnData = await spreadsheet.execute.call(this, items, 'operation');
|
||||
}
|
||||
|
||||
if (['binaryToPropery', 'fromJson', 'text', 'fromIcs', 'xml'].includes(operation)) {
|
||||
returnData = await moveTo.execute.call(this, items, operation);
|
||||
}
|
||||
|
||||
if (operation === 'pdf') {
|
||||
returnData = await pdf.execute.call(this, items);
|
||||
}
|
||||
|
||||
return [returnData];
|
||||
}
|
||||
}
|
|
@ -0,0 +1,192 @@
|
|||
import type {
|
||||
IDataObject,
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { BINARY_ENCODING, NodeOperationError, deepCopy, jsonParse } from 'n8n-workflow';
|
||||
|
||||
import { encodeDecodeOptions } from '@utils/descriptions';
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
|
||||
import get from 'lodash/get';
|
||||
import set from 'lodash/set';
|
||||
import unset from 'lodash/unset';
|
||||
|
||||
import iconv from 'iconv-lite';
|
||||
|
||||
import { icsCalendarToObject } from 'ts-ics';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
hint: 'The name of the input field containing the file data to be processed',
|
||||
},
|
||||
{
|
||||
displayName: 'Destination Output Field',
|
||||
name: 'destinationKey',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g data',
|
||||
description: 'The name of the output field that will contain the extracted data',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'File Encoding',
|
||||
name: 'encoding',
|
||||
type: 'options',
|
||||
options: encodeDecodeOptions,
|
||||
default: 'utf8',
|
||||
description: 'Specify the encoding of the file, defaults to UTF-8',
|
||||
},
|
||||
{
|
||||
displayName: 'Strip BOM',
|
||||
name: 'stripBOM',
|
||||
displayOptions: {
|
||||
show: {
|
||||
encoding: ['utf8', 'cesu8', 'ucs2'],
|
||||
},
|
||||
},
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description:
|
||||
'Whether to strip the BOM (Byte Order Mark) from the file, this could help in an environment where the presence of the BOM is causing issues or inconsistencies',
|
||||
},
|
||||
{
|
||||
displayName: 'Keep Source',
|
||||
name: 'keepSource',
|
||||
type: 'options',
|
||||
default: 'json',
|
||||
options: [
|
||||
{
|
||||
name: 'JSON',
|
||||
value: 'json',
|
||||
description: 'Include JSON data of the input item',
|
||||
},
|
||||
{
|
||||
name: 'Binary',
|
||||
value: 'binary',
|
||||
description: 'Include binary data of the input item',
|
||||
},
|
||||
{
|
||||
name: 'Both',
|
||||
value: 'both',
|
||||
description: 'Include both JSON and binary data of the input item',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['binaryToPropery', 'fromJson', 'text', 'fromIcs', 'xml'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(
|
||||
this: IExecuteFunctions,
|
||||
items: INodeExecutionData[],
|
||||
operation: string,
|
||||
) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
try {
|
||||
const item = items[itemIndex];
|
||||
const options = this.getNodeParameter('options', itemIndex);
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex);
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
pairedItem: { item: itemIndex },
|
||||
};
|
||||
|
||||
const value = get(item.binary, binaryPropertyName);
|
||||
|
||||
if (!value) continue;
|
||||
|
||||
const encoding = (options.encoding as string) || 'utf8';
|
||||
const buffer = await this.helpers.getBinaryDataBuffer(itemIndex, binaryPropertyName);
|
||||
|
||||
if (options.keepSource && options.keepSource !== 'binary') {
|
||||
newItem.json = deepCopy(item.json);
|
||||
}
|
||||
|
||||
let convertedValue: string | IDataObject;
|
||||
if (operation !== 'binaryToPropery') {
|
||||
convertedValue = iconv.decode(buffer, encoding, {
|
||||
stripBOM: options.stripBOM as boolean,
|
||||
});
|
||||
} else {
|
||||
convertedValue = Buffer.from(buffer).toString(BINARY_ENCODING);
|
||||
}
|
||||
|
||||
if (operation === 'fromJson') {
|
||||
if (convertedValue === '') {
|
||||
convertedValue = {};
|
||||
} else {
|
||||
convertedValue = jsonParse(convertedValue);
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'fromIcs') {
|
||||
convertedValue = icsCalendarToObject(convertedValue as string);
|
||||
}
|
||||
|
||||
const destinationKey = this.getNodeParameter('destinationKey', itemIndex, '') as string;
|
||||
set(newItem.json, destinationKey, convertedValue);
|
||||
|
||||
if (options.keepSource === 'binary' || options.keepSource === 'both') {
|
||||
newItem.binary = item.binary;
|
||||
} else {
|
||||
// this binary data would not be included, but there also might be other binary data
|
||||
// which should be included, copy it over and unset current binary data
|
||||
newItem.binary = deepCopy(item.binary);
|
||||
unset(newItem.binary, binaryPropertyName);
|
||||
}
|
||||
|
||||
returnData.push(newItem);
|
||||
} catch (error) {
|
||||
let errorDescription;
|
||||
if (error.message.includes('Unexpected token')) {
|
||||
error.message = "The file selected in 'Input Binary Field' is not in JSON format";
|
||||
errorDescription =
|
||||
"Try to change the operation or select a JSON file in 'Input Binary Field'";
|
||||
}
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error, {
|
||||
itemIndex,
|
||||
description: errorDescription,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
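The 'Destination Output Field' parameter accepts a dot-separated path; the extracted value is written onto the output item with lodash `set`, which creates any intermediate objects as needed. A minimal sketch of that behaviour (the key and value below are illustrative, not taken from the node):

import set from 'lodash/set';

const json: Record<string, unknown> = {};
// a destinationKey of 'data.content' nests the extracted value under data.content
set(json, 'data.content', 'extracted text');
// json is now { data: { content: 'extracted text' } }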
|
|
@ -0,0 +1,141 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import { NodeOperationError, deepCopy } from 'n8n-workflow';
|
||||
|
||||
import unset from 'lodash/unset';
|
||||
|
||||
import { extractDataFromPDF } from '@utils/binary';
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
placeholder: 'e.g. data',
|
||||
hint: 'The name of the input binary field containing the file to be extracted',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Join Pages',
|
||||
name: 'joinPages',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description:
|
||||
'Whether to join the text from all pages or return an array of text from each page',
|
||||
},
|
||||
{
|
||||
displayName: 'Keep Source',
|
||||
name: 'keepSource',
|
||||
type: 'options',
|
||||
default: 'json',
|
||||
options: [
|
||||
{
|
||||
name: 'JSON',
|
||||
value: 'json',
|
||||
description: 'Include JSON data of the input item',
|
||||
},
|
||||
{
|
||||
name: 'Binary',
|
||||
value: 'binary',
|
||||
description: 'Include binary data of the input item',
|
||||
},
|
||||
{
|
||||
name: 'Both',
|
||||
value: 'both',
|
||||
description: 'Include both JSON and binary data of the input item',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Max Pages',
|
||||
name: 'maxPages',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description: 'Maximum number of pages to include',
|
||||
},
|
||||
{
|
||||
displayName: 'Password',
|
||||
name: 'password',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
default: '',
|
||||
description: 'Provide the password if the PDF is encrypted',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['pdf'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
try {
|
||||
const item = items[itemIndex];
|
||||
const options = this.getNodeParameter('options', itemIndex);
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex);
|
||||
|
||||
const json = await extractDataFromPDF.call(
|
||||
this,
|
||||
binaryPropertyName,
|
||||
options.password as string,
|
||||
options.maxPages as number,
|
||||
options.joinPages as boolean,
|
||||
itemIndex,
|
||||
);
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
pairedItem: { item: itemIndex },
|
||||
};
|
||||
|
||||
if (options.keepSource && options.keepSource !== 'binary') {
|
||||
newItem.json = { ...deepCopy(item.json), ...json };
|
||||
} else {
|
||||
newItem.json = json;
|
||||
}
|
||||
|
||||
if (options.keepSource === 'binary' || options.keepSource === 'both') {
|
||||
newItem.binary = item.binary;
|
||||
} else {
|
||||
// this binary data would not be included, but there also might be other binary data
|
||||
// which should be included, copy it over and unset current binary data
|
||||
newItem.binary = deepCopy(item.binary);
|
||||
unset(newItem.binary, binaryPropertyName);
|
||||
}
|
||||
|
||||
returnData.push(newItem);
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error, { itemIndex });
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
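The 'Keep Source' option decides whether the incoming item's JSON and binary data are carried into the output alongside the extracted PDF fields. A rough sketch of the default 'JSON' behaviour, with placeholder values (the shape of `extracted` is an assumption, not the helper's actual return type):

import { deepCopy } from 'n8n-workflow';

const item = { json: { id: 1 } };
const extracted = { text: 'PDF contents', numpages: 2 }; // placeholder extraction result

// keepSource === 'json' (the default): keep the incoming JSON and merge the extraction in
const outputJson = { ...deepCopy(item.json), ...extracted };
// outputJson === { id: 1, text: 'PDF contents', numpages: 2 }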
|
|
@ -0,0 +1,59 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import * as fromFile from '../../../SpreadsheetFile/v2/fromFile.operation';
|
||||
|
||||
export const operations = ['csv', 'html', 'rtf', 'ods', 'xls', 'xlsx'];
|
||||
|
||||
export const description: INodeProperties[] = fromFile.description
|
||||
.filter((property) => property.name !== 'fileFormat')
|
||||
.map((property) => {
|
||||
const newProperty = { ...property };
|
||||
newProperty.displayOptions = {
|
||||
show: {
|
||||
operation: operations,
|
||||
},
|
||||
};
|
||||
|
||||
if (newProperty.name === 'options') {
|
||||
newProperty.options = (newProperty.options as INodeProperties[]).map((option) => {
|
||||
let newOption = option;
|
||||
if (['delimiter', 'fromLine', 'maxRowCount', 'enableBOM'].includes(option.name)) {
|
||||
newOption = { ...option, displayOptions: { show: { '/operation': ['csv'] } } };
|
||||
}
|
||||
if (option.name === 'sheetName') {
|
||||
newOption = {
|
||||
...option,
|
||||
displayOptions: { show: { '/operation': ['ods', 'xls', 'xlsx'] } },
|
||||
description: 'Name of the sheet to read from in the spreadsheet',
|
||||
};
|
||||
}
|
||||
if (option.name === 'range') {
|
||||
newOption = {
|
||||
...option,
|
||||
displayOptions: { show: { '/operation': ['ods', 'xls', 'xlsx'] } },
|
||||
};
|
||||
}
|
||||
if (['includeEmptyCells', 'headerRow'].includes(option.name)) {
|
||||
newOption = {
|
||||
...option,
|
||||
displayOptions: { show: { '/operation': ['ods', 'xls', 'xlsx', 'csv', 'html'] } },
|
||||
};
|
||||
}
|
||||
return newOption;
|
||||
});
|
||||
}
|
||||
return newProperty;
|
||||
});
|
||||
|
||||
export async function execute(
|
||||
this: IExecuteFunctions,
|
||||
items: INodeExecutionData[],
|
||||
fileFormatProperty: string,
|
||||
) {
|
||||
const returnData: INodeExecutionData[] = await fromFile.execute.call(
|
||||
this,
|
||||
items,
|
||||
fileFormatProperty,
|
||||
);
|
||||
return returnData;
|
||||
}
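This wrapper reuses the Spreadsheet File node's fromFile operation and only narrows the displayOptions of each property to the relevant operations. Judging from how updateDisplayOptions is used throughout these files, it merges a shared displayOptions object into every property in a list; a hypothetical sketch of that idea (an assumption about the utility, not its actual source):

import type { INodeProperties } from 'n8n-workflow';

function applyDisplayOptions(
	displayOptions: INodeProperties['displayOptions'],
	properties: INodeProperties[],
): INodeProperties[] {
	// attach the same show/hide conditions to every property in the list
	return properties.map((property) => ({ ...property, displayOptions }));
}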
|
|
@ -0,0 +1,5 @@
|
|||
<svg width="512" height="512" viewBox="0 0 512 512" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 39.8158C0 33.2901 5.28667 28 11.8081 28H168.41V189.704C168.41 196.331 173.782 201.704 180.41 201.704H342L342 287H268C243.699 287 224 306.699 224 331V355C224 379.301 243.699 399 268 399L342 399L342 473.184C342 479.71 336.713 485 330.192 485H11.8081C5.28667 485 0 479.71 0 473.184V39.8158Z" fill="#003355"/>
|
||||
<path d="M199.898 34C199.898 30.6863 202.584 28 205.898 28H208.564C211.7 28 214.708 29.2487 216.923 31.4707L338.551 153.468C340.76 155.683 342 158.684 342 161.813L342 164.195C342 167.509 339.314 170.195 336 170.195H205.898C202.584 170.195 199.898 167.509 199.898 164.195V34Z" fill="#003355"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M381.077 415.982C371.678 425.329 371.636 440.525 380.982 449.923C390.329 459.322 405.525 459.364 414.923 450.018L504.923 360.518C509.453 356.013 512 349.888 512 343.5C512 337.112 509.453 330.987 504.923 326.482L414.923 236.982C405.525 227.636 390.329 227.678 380.982 237.077C371.636 246.475 371.678 261.671 381.077 271.018L429.327 319L268 319C261.373 319 256 324.373 256 331L256 355C256 361.627 261.373 367 268 367L430.333 367L381.077 415.982Z" fill="#003355"/>
|
||||
</svg>
|
After Width: | Height: | Size: 1.2 KiB |
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.readWriteFile",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": ["Core Nodes"],
|
||||
"resources": {
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.filesreadwrite/"
|
||||
}
|
||||
]
|
||||
},
|
||||
"alias": ["Binary", "File", "Text", "Open", "Import", "Save", "Export", "Disk", "Transfer"],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files"]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as read from './actions/read.operation';
|
||||
import * as write from './actions/write.operation';
|
||||
|
||||
export class ReadWriteFile implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Read/Write Files from Disk',
|
||||
name: 'readWriteFile',
|
||||
icon: 'file:readWriteFile.svg',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
description: 'Read or write files from the computer that runs n8n',
|
||||
defaults: {
|
||||
name: 'Read/Write Files from Disk',
|
||||
},
|
||||
inputs: ['main'],
|
||||
outputs: ['main'],
|
||||
properties: [
|
||||
{
|
||||
displayName:
|
||||
'Use this node to read and write files on the same computer running n8n. To handle files between different computers, please use other nodes (e.g. FTP, HTTP Request, AWS).',
|
||||
name: 'info',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Read File(s) From Disk',
|
||||
value: 'read',
|
||||
description: 'Retrieve one or more files from the computer that runs n8n',
|
||||
action: 'Read File(s) From Disk',
|
||||
},
|
||||
{
|
||||
name: 'Write File to Disk',
|
||||
value: 'write',
|
||||
description: 'Create a binary file on the computer that runs n8n',
|
||||
action: 'Write File to Disk',
|
||||
},
|
||||
],
|
||||
default: 'read',
|
||||
},
|
||||
...read.description,
|
||||
...write.description,
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions) {
|
||||
const operation = this.getNodeParameter('operation', 0, 'read');
|
||||
const items = this.getInputData();
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
if (operation === 'read') {
|
||||
returnData = await read.execute.call(this, items);
|
||||
}
|
||||
|
||||
if (operation === 'write') {
|
||||
returnData = await write.execute.call(this, items);
|
||||
}
|
||||
|
||||
return [returnData];
|
||||
}
|
||||
}
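Like the Convert to File / Extract From File node above, this node keeps each operation in its own module and wraps the resulting items in an outer array, because an n8n node returns one item array per output connector. A sketch of that return shape (values illustrative):

import type { INodeExecutionData } from 'n8n-workflow';

const items: INodeExecutionData[] = [{ json: { fileName: '/data/example.jpg' } }];
const nodeOutput: INodeExecutionData[][] = [items]; // one inner array per output connector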
|
|
@ -0,0 +1,144 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import glob from 'fast-glob';
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
import { errorMapper } from '../helpers/utils';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'File(s) Selector',
|
||||
name: 'fileSelector',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
placeholder: 'e.g. /home/user/Pictures/**/*.png',
|
||||
hint: 'Supports patterns, learn more <a href="https://github.com/micromatch/picomatch#basic-globbing" target="_blank">here</a>',
|
||||
description: "Specify a file's path or path pattern to read multiple files",
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'File Extension',
|
||||
name: 'fileExtension',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. zip',
|
||||
description: 'Extension of the file in the output binary',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. data.zip',
|
||||
description: 'Name of the file in the output binary',
|
||||
},
|
||||
{
|
||||
displayName: 'Mime Type',
|
||||
name: 'mimeType',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. application/zip',
|
||||
description: 'Mime type of the file in the output binary',
|
||||
},
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
placeholder: 'e.g. data',
|
||||
description: "By default 'data' is used",
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['read'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
let fileSelector;
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
try {
|
||||
fileSelector = this.getNodeParameter('fileSelector', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {});
|
||||
|
||||
let dataPropertyName = 'data';
|
||||
|
||||
if (options.dataPropertyName) {
|
||||
dataPropertyName = options.dataPropertyName as string;
|
||||
}
|
||||
|
||||
const files = await glob(fileSelector);
|
||||
|
||||
const newItems: INodeExecutionData[] = [];
|
||||
for (const filePath of files) {
|
||||
const stream = await this.helpers.createReadStream(filePath);
|
||||
const binaryData = await this.helpers.prepareBinaryData(stream, filePath);
|
||||
|
||||
if (options.fileName !== undefined) {
|
||||
binaryData.fileName = options.fileName as string;
|
||||
}
|
||||
|
||||
if (options.fileExtension !== undefined) {
|
||||
binaryData.fileExtension = options.fileExtension as string;
|
||||
}
|
||||
|
||||
if (options.mimeType !== undefined) {
|
||||
binaryData.mimeType = options.mimeType as string;
|
||||
}
|
||||
|
||||
newItems.push({
|
||||
binary: {
|
||||
[dataPropertyName]: binaryData,
|
||||
},
|
||||
json: {
|
||||
mimeType: binaryData.mimeType,
|
||||
fileType: binaryData.fileType,
|
||||
fileName: binaryData.fileName,
|
||||
directory: binaryData.directory,
|
||||
fileExtension: binaryData.fileExtension,
|
||||
fileSize: binaryData.fileSize,
|
||||
},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
returnData.push(...newItems);
|
||||
} catch (error) {
|
||||
const nodeOperationError = errorMapper.call(this, error, itemIndex, {
|
||||
filePath: fileSelector,
|
||||
operation: 'read',
|
||||
});
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: nodeOperationError.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw nodeOperationError;
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
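The read operation resolves the 'File(s) Selector' value with fast-glob before streaming each match into a binary property. A minimal sketch of the resolution step in isolation (the paths are placeholders):

import glob from 'fast-glob';

async function resolveSelector(fileSelector: string): Promise<string[]> {
	// fast-glob expands a glob pattern into concrete, existing file paths
	return glob(fileSelector);
}

// resolveSelector('/home/user/Pictures/**/*.png')
//   -> e.g. ['/home/user/Pictures/a.png', '/home/user/Pictures/sub/b.png']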
|
|
@ -0,0 +1,123 @@
|
|||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
import { BINARY_ENCODING } from 'n8n-workflow';
|
||||
|
||||
import type { Readable } from 'stream';
|
||||
|
||||
import { updateDisplayOptions } from '@utils/utilities';
|
||||
import { errorMapper } from '../helpers/utils';
|
||||
|
||||
export const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'File Path and Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
placeholder: 'e.g. /data/example.jpg',
|
||||
description:
|
||||
'Path and name of the file that should be written. Also include the file extension.',
|
||||
},
|
||||
{
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
placeholder: 'e.g. data',
|
||||
required: true,
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Append',
|
||||
name: 'append',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description:
|
||||
"Whether to append to an existing file. While it's commonly used with text files, it's not limited to them, however, it wouldn't be applicable for file types that have a specific structure like most binary formats.",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
operation: ['write'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
let fileName;
|
||||
|
||||
let item: INodeExecutionData;
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
try {
|
||||
const dataPropertyName = this.getNodeParameter('dataPropertyName', itemIndex);
|
||||
fileName = this.getNodeParameter('fileName', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {});
|
||||
const flag: string = options.append ? 'a' : 'w';
|
||||
|
||||
item = items[itemIndex];
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: {},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
};
|
||||
Object.assign(newItem.json, item.json);
|
||||
|
||||
const binaryData = this.helpers.assertBinaryData(itemIndex, dataPropertyName);
|
||||
|
||||
let fileContent: Buffer | Readable;
|
||||
if (binaryData.id) {
|
||||
fileContent = await this.helpers.getBinaryStream(binaryData.id);
|
||||
} else {
|
||||
fileContent = Buffer.from(binaryData.data, BINARY_ENCODING);
|
||||
}
|
||||
|
||||
// Write the file to disk
|
||||
await this.helpers.writeContentToFile(fileName, fileContent, flag);
|
||||
|
||||
if (item.binary !== undefined) {
|
||||
// Create a shallow copy of the binary data so that the old
|
||||
// data references which do not get changed still stay behind
|
||||
// but the incoming data does not get changed.
|
||||
newItem.binary = {};
|
||||
Object.assign(newItem.binary, item.binary);
|
||||
}
|
||||
|
||||
// Add the file name to data
|
||||
newItem.json.fileName = fileName;
|
||||
|
||||
returnData.push(newItem);
|
||||
} catch (error) {
|
||||
const nodeOperationError = errorMapper.call(this, error, itemIndex, {
|
||||
filePath: fileName,
|
||||
operation: 'write',
|
||||
});
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: nodeOperationError.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: itemIndex,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw nodeOperationError;
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
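The Append option is translated into a Node.js file-system flag before the content is handed to this.helpers.writeContentToFile: 'a' appends to an existing file, 'w' creates or overwrites it. A standalone sketch of the same flag mapping using plain Node APIs (illustrative only; the path is a placeholder):

import { writeFile } from 'node:fs/promises';

const append = true; // value of the node's Append option
await writeFile('/tmp/example.txt', 'new line\n', { flag: append ? 'a' : 'w' });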
|
|
@ -0,0 +1,32 @@
|
|||
import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
|
||||
export function errorMapper(
|
||||
this: IExecuteFunctions,
|
||||
error: Error,
|
||||
itemIndex: number,
|
||||
context?: IDataObject,
|
||||
) {
|
||||
let message;
|
||||
let description;
|
||||
|
||||
if (error.message.includes('Cannot create a string longer than')) {
|
||||
message = 'The file is too large';
|
||||
description =
|
||||
'The binary file you are attempting to read exceeds 512MB, which is the limit when using the default binary data mode. Try the filesystem binary data mode instead. More information <a href="https://docs.n8n.io/hosting/scaling/binary-data/" target="_blank">here</a>.';
|
||||
} else if (error.message.includes('EACCES') && context?.operation === 'read') {
|
||||
const path =
|
||||
((error as unknown as IDataObject).path as string) || (context?.filePath as string);
|
||||
message = `You don't have permission to access ${path}`;
|
||||
description =
|
||||
"Verify that the path specified in 'File(s) Selector' is correct, or change the file(s) permissions if needed";
|
||||
} else if (error.message.includes('EACCES') && context?.operation === 'write') {
|
||||
const path =
|
||||
((error as unknown as IDataObject).path as string) || (context?.filePath as string);
|
||||
message = `You don't have permission to write the file ${path}`;
|
||||
description =
|
||||
"Specify another destination folder in 'File Path and Name', or change the permissions of the parent folder";
|
||||
}
|
||||
|
||||
return new NodeOperationError(this.getNode(), error, { itemIndex, message, description });
|
||||
}
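Both operations route their catch blocks through errorMapper, which wraps low-level file-system errors in a NodeOperationError carrying a friendlier message and description. A sketch of how a permission failure would be mapped (the error object and path below are fabricated placeholders):

// A placeholder error resembling what Node's fs layer raises on a permission failure
const fsError = Object.assign(new Error("EACCES: permission denied, open '/root/out.txt'"), {
	path: '/root/out.txt',
});

// Inside a node's execute(), where `this` is the IExecuteFunctions context:
// const friendly = errorMapper.call(this, fsError, 0, { filePath: '/root/out.txt', operation: 'write' });
// friendly.message would then read "You don't have permission to write the file /root/out.txt"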
|
|
@ -0,0 +1,13 @@
|
|||
<svg width="512" height="512" viewBox="0 0 512 512" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1141_1547)">
|
||||
<path d="M0 12C0 5.37258 5.37258 0 12 0H159V154C159 160.627 164.373 166 171 166H325V242H228.562C210.895 242 194.656 251.705 186.288 267.264L129.203 373.407C125.131 380.978 123 389.44 123 398.037V434H12C5.37257 434 0 428.627 0 422V12Z" fill="#44AA44"/>
|
||||
<path d="M325 134V127.401C325 124.223 323.74 121.175 321.495 118.925L206.369 3.52481C204.118 1.2682 201.061 0 197.873 0H191V134H325Z" fill="#44AA44"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M228.563 274C222.674 274 217.261 277.235 214.472 282.421L172.211 361H492.64L444.67 281.717C441.772 276.927 436.58 274 430.981 274H228.563Z" fill="#44AA44"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M155 409C155 400.163 162.163 393 171 393H496C504.837 393 512 400.163 512 409V496C512 504.837 504.837 512 496 512H171C162.163 512 155 504.837 155 496V409ZM397 453C397 466.255 386.255 477 373 477C359.745 477 349 466.255 349 453C349 439.745 359.745 429 373 429C386.255 429 397 439.745 397 453ZM445 477C458.255 477 469 466.255 469 453C469 439.745 458.255 429 445 429C431.745 429 421 439.745 421 453C421 466.255 431.745 477 445 477Z" fill="#44AA44"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1141_1547">
|
||||
<rect width="512" height="512" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
|
@ -0,0 +1,101 @@
|
|||
/* eslint-disable @typescript-eslint/no-loop-func */
|
||||
import * as Helpers from '@test/nodes/Helpers';
|
||||
import type { WorkflowTestData } from '@test/nodes/types';
|
||||
import { executeWorkflow } from '@test/nodes/ExecuteWorkflow';
|
||||
|
||||
describe('Test ReadWriteFile Node', () => {
|
||||
beforeEach(async () => {
|
||||
await Helpers.initBinaryDataService();
|
||||
});
|
||||
|
||||
const temporaryDir = Helpers.createTemporaryDir();
|
||||
const directory = __dirname.replace(/\\/gi, '/');
|
||||
|
||||
const workflow = Helpers.readJsonFileSync(
|
||||
'nodes/Files/ReadWriteFile/test/ReadWriteFile.workflow.json',
|
||||
);
|
||||
|
||||
const readFileNode = workflow.nodes.find((n: any) => n.name === 'Read from Disk');
|
||||
readFileNode.parameters.fileSelector = `${directory}/image.jpg`;
|
||||
|
||||
const writeFileNode = workflow.nodes.find((n: any) => n.name === 'Write to Disk');
|
||||
writeFileNode.parameters.fileName = `${temporaryDir}/image-written.jpg`;
|
||||
|
||||
const tests: WorkflowTestData[] = [
|
||||
{
|
||||
description: 'nodes/Files/ReadWriteFile/test/ReadWriteFile.workflow.json',
|
||||
input: {
|
||||
workflowData: workflow,
|
||||
},
|
||||
output: {
|
||||
nodeData: {
|
||||
'Read from Disk': [
|
||||
[
|
||||
{
|
||||
json: {
|
||||
directory,
|
||||
fileExtension: 'jpg',
|
||||
fileName: 'image.jpg',
|
||||
fileSize: '1.04 kB',
|
||||
fileType: 'image',
|
||||
mimeType: 'image/jpeg',
|
||||
},
|
||||
binary: {
|
||||
data: {
|
||||
mimeType: 'image/jpeg',
|
||||
fileType: 'image',
|
||||
fileExtension: 'jpg',
|
||||
data: '/9j/4AAQSkZJRgABAQEASABIAAD/4QBmRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAAQAAAATgAAAAAAARlJAAAD6AABGUkAAAPocGFpbnQubmV0IDUuMC4xAP/bAEMAIBYYHBgUIBwaHCQiICYwUDQwLCwwYkZKOlB0Znp4cmZwboCQuJyAiK6KbnCg2qKuvsTO0M58muLy4MjwuMrOxv/bAEMBIiQkMCowXjQ0XsaEcITGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxv/AABEIAB8AOwMBEgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/AOgqgrXF2zNHJ5aKcD3oNPZ23di/VKG82bkuTh1OMgdaAdOSLtZ6G5ut0iSeWoOAKAdO27NCqUN8oQrcHDqccDrQDpyRNPdRwEKcsx7CobIebPLORwThc0inGMF724jagNpxG4OOM1dIDAgjIPBpkqUOxnR2pmh85pW3nJB9KkNi4yqTssZ6rSNXNX0ehHFfusYDLuI7+tXY4I40ChQcdzQRKcL7Fb7PcQO32cqUY5we1XqZPtH11KsFoFDGYK7sckkZxVqgTnJlEQXMBZYGUoTkZ7VeoH7RvcqwWaIh80K7k5JIq1QJzkyhbMtvdSxMdqnlc1amgjmx5i5I70inNSVpFdrmaWRltkBVerHvUW57B2AUNGxyOaC+VW9xXLVrcGbcjrtkXqKZZxvveeTAL9APSgiooq1ty3RTMj//2Q==',
|
||||
directory,
|
||||
fileName: 'image.jpg',
|
||||
fileSize: '1.04 kB',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
'Write to Disk': [
|
||||
[
|
||||
{
|
||||
json: {
|
||||
directory,
|
||||
fileExtension: 'jpg',
|
||||
fileName: writeFileNode.parameters.fileName,
|
||||
fileSize: '1.04 kB',
|
||||
fileType: 'image',
|
||||
mimeType: 'image/jpeg',
|
||||
},
|
||||
binary: {
|
||||
data: {
|
||||
mimeType: 'image/jpeg',
|
||||
fileType: 'image',
|
||||
fileExtension: 'jpg',
|
||||
data: '/9j/4AAQSkZJRgABAQEASABIAAD/4QBmRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUAAAABAAAARgEoAAMAAAABAAIAAAExAAIAAAAQAAAATgAAAAAAARlJAAAD6AABGUkAAAPocGFpbnQubmV0IDUuMC4xAP/bAEMAIBYYHBgUIBwaHCQiICYwUDQwLCwwYkZKOlB0Znp4cmZwboCQuJyAiK6KbnCg2qKuvsTO0M58muLy4MjwuMrOxv/bAEMBIiQkMCowXjQ0XsaEcITGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxv/AABEIAB8AOwMBEgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2gAMAwEAAhEDEQA/AOgqgrXF2zNHJ5aKcD3oNPZ23di/VKG82bkuTh1OMgdaAdOSLtZ6G5ut0iSeWoOAKAdO27NCqUN8oQrcHDqccDrQDpyRNPdRwEKcsx7CobIebPLORwThc0inGMF724jagNpxG4OOM1dIDAgjIPBpkqUOxnR2pmh85pW3nJB9KkNi4yqTssZ6rSNXNX0ehHFfusYDLuI7+tXY4I40ChQcdzQRKcL7Fb7PcQO32cqUY5we1XqZPtH11KsFoFDGYK7sckkZxVqgTnJlEQXMBZYGUoTkZ7VeoH7RvcqwWaIh80K7k5JIq1QJzkyhbMtvdSxMdqnlc1amgjmx5i5I70inNSVpFdrmaWRltkBVerHvUW57B2AUNGxyOaC+VW9xXLVrcGbcjrtkXqKZZxvveeTAL9APSgiooq1ty3RTMj//2Q==',
|
||||
directory,
|
||||
fileName: 'image.jpg',
|
||||
fileSize: '1.04 kB',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const nodeTypes = Helpers.setup(tests);
|
||||
|
||||
for (const testData of tests) {
|
||||
test(testData.description, async () => {
|
||||
const { result } = await executeWorkflow(testData, nodeTypes);
|
||||
|
||||
const resultNodeData = Helpers.getResultNodeData(result, testData);
|
||||
resultNodeData.forEach(({ nodeName, resultData }) => {
|
||||
expect(resultData).toEqual(testData.output.nodeData[nodeName]);
|
||||
});
|
||||
|
||||
expect(result.finished).toEqual(true);
|
||||
});
|
||||
}
|
||||
});
|
|
@ -0,0 +1,72 @@
|
|||
{
|
||||
"meta": {
|
||||
"instanceId": "104a4d08d8897b8bdeb38aaca515021075e0bd8544c983c2bb8c86e6a8e6081c"
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"parameters": {},
|
||||
"id": "01b8609f-a345-41de-80bf-6d84276b5e7a",
|
||||
"name": "When clicking \"Execute Workflow\"",
|
||||
"type": "n8n-nodes-base.manualTrigger",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
700,
|
||||
320
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"fileSelector": "C:/Test/image.jpg",
|
||||
"options": {}
|
||||
},
|
||||
"id": "a1ea0fd0-cc95-4de2-bc58-bc980cb1d97e",
|
||||
"name": "Read from Disk",
|
||||
"type": "n8n-nodes-base.readWriteFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
920,
|
||||
320
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "write",
|
||||
"fileName": "C:/Test/image-written.jpg",
|
||||
"options": {}
|
||||
},
|
||||
"id": "94abac52-bd10-4b57-85b0-691c70989137",
|
||||
"name": "Write to Disk",
|
||||
"type": "n8n-nodes-base.readWriteFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1140,
|
||||
320
|
||||
]
|
||||
}
|
||||
],
|
||||
"connections": {
|
||||
"When clicking \"Execute Workflow\"": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Read from Disk",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Read from Disk": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Write to Disk",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"pinData": {}
|
||||
}
|
BIN
packages/nodes-base/nodes/Files/ReadWriteFile/test/image.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 1 KiB |
|
@ -0,0 +1,5 @@
|
|||
import { testWorkflows, getWorkflowFilenames } from '@test/nodes/Helpers';
|
||||
|
||||
const workflows = getWorkflowFilenames(__dirname);
|
||||
|
||||
describe('Test Convert to File Node', () => testWorkflows(workflows));
|
|
@ -0,0 +1,707 @@
|
|||
{
|
||||
"name": "convert to tests",
|
||||
"nodes": [
|
||||
{
|
||||
"parameters": {},
|
||||
"id": "35cce987-aa4f-4738-bfcd-b85098948341",
|
||||
"name": "When clicking \"Execute Workflow\"",
|
||||
"type": "n8n-nodes-base.manualTrigger",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
680,
|
||||
1100
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"fields": {
|
||||
"values": [
|
||||
{
|
||||
"name": "row_number",
|
||||
"type": "numberValue",
|
||||
"numberValue": "2"
|
||||
},
|
||||
{
|
||||
"name": "country",
|
||||
"stringValue": "uk"
|
||||
},
|
||||
{
|
||||
"name": "browser",
|
||||
"stringValue": "firefox"
|
||||
},
|
||||
{
|
||||
"name": "session_duration",
|
||||
"type": "numberValue",
|
||||
"numberValue": "1"
|
||||
},
|
||||
{
|
||||
"name": "visits",
|
||||
"type": "numberValue",
|
||||
"numberValue": "1"
|
||||
}
|
||||
]
|
||||
},
|
||||
"include": "none",
|
||||
"options": {}
|
||||
},
|
||||
"id": "13305747-c966-4f46-90b3-ffff6835b714",
|
||||
"name": "Edit Fields",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"typeVersion": 3.2,
|
||||
"position": [
|
||||
980,
|
||||
880
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"options": {}
|
||||
},
|
||||
"id": "b08b269d-0735-4dc4-b5a7-7870f5e115f9",
|
||||
"name": "Convert to File",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
420
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "html",
|
||||
"options": {}
|
||||
},
|
||||
"id": "c68c209f-771f-4d25-b832-3d55ee97e6ff",
|
||||
"name": "Convert to File1",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
600
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "toJson",
|
||||
"options": {}
|
||||
},
|
||||
"id": "2b6f27ed-a4dc-4a29-905f-f0a921ea6ee7",
|
||||
"name": "Convert to File2",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
780
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "toJson",
|
||||
"mode": "each",
|
||||
"options": {}
|
||||
},
|
||||
"id": "cd482d12-7547-4eb6-880b-bd2c31ef06d5",
|
||||
"name": "Convert to File3",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
960
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "xlsx",
|
||||
"options": {}
|
||||
},
|
||||
"id": "1fd693e3-4286-49f4-ba45-70584c1d67f7",
|
||||
"name": "Convert to File5",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
1140
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"options": {}
|
||||
},
|
||||
"id": "636866e9-ca0d-44a3-b27a-90cf33e26485",
|
||||
"name": "Extract From File",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
420
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "html",
|
||||
"options": {}
|
||||
},
|
||||
"id": "5c66b2ea-94b2-4a1b-a34d-acb07fe33e13",
|
||||
"name": "Extract From File1",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
600
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "fromJson",
|
||||
"options": {}
|
||||
},
|
||||
"id": "a03752f0-e3bb-4dd3-9aae-dc4d1471c281",
|
||||
"name": "Extract From File2",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
780
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "fromJson",
|
||||
"options": {}
|
||||
},
|
||||
"id": "eb10c006-60d7-4842-b9e5-a364f42dd1ab",
|
||||
"name": "Extract From File3",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
960
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "xlsx",
|
||||
"options": {}
|
||||
},
|
||||
"id": "64c98172-2a77-4e83-b19b-232899df8113",
|
||||
"name": "Extract From File4",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
1140
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "xls",
|
||||
"options": {}
|
||||
},
|
||||
"id": "edbfd57e-d36b-470d-9e8d-9c7f67c131b4",
|
||||
"name": "Convert to File6",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
1320
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "xls",
|
||||
"options": {}
|
||||
},
|
||||
"id": "86713806-8dc6-45ec-a710-e80b839ec193",
|
||||
"name": "Extract From File5",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
1320
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"fields": {
|
||||
"values": [
|
||||
{
|
||||
"name": "base64",
|
||||
"stringValue": "VGhpcyBpcyB0ZXh0IGNvbnZlcnRlZCB0byBiYXNlIDY0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"include": "none",
|
||||
"options": {}
|
||||
},
|
||||
"id": "c205d380-2459-4d16-bb56-f862c53c25de",
|
||||
"name": "Edit Fields1",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"typeVersion": 3.2,
|
||||
"position": [
|
||||
980,
|
||||
1060
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "toBinary",
|
||||
"sourceProperty": "base64",
|
||||
"options": {}
|
||||
},
|
||||
"id": "3f5fe04c-63cf-47d0-a7ca-d427b95a0c52",
|
||||
"name": "Convert to File7",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
1880
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "text",
|
||||
"options": {}
|
||||
},
|
||||
"id": "dbb4eae0-f0ee-4453-aed7-7d8322974c94",
|
||||
"name": "Extract From File6",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
1880
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "ods",
|
||||
"options": {}
|
||||
},
|
||||
"id": "e3bf810d-7795-4663-82d1-768f76adc0d9",
|
||||
"name": "Convert to File8",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
1520
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "ods",
|
||||
"options": {}
|
||||
},
|
||||
"id": "7713d273-2d51-4e3e-8ac9-9a4e6013c690",
|
||||
"name": "Extract From File7",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
1520
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "rtf",
|
||||
"options": {}
|
||||
},
|
||||
"id": "631b29cb-dcde-42cc-a514-45622923dab6",
|
||||
"name": "Convert to File9",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
1700
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "rtf",
|
||||
"options": {}
|
||||
},
|
||||
"id": "168b6586-c89d-4b0e-880e-4ddb8ea7cb2f",
|
||||
"name": "Extract From File8",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
1700
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "iCal",
|
||||
"start": "2024-01-03T00:00:00",
|
||||
"end": "2024-01-04T00:00:00",
|
||||
"allDay": true,
|
||||
"additionalFields": {
|
||||
"description": "event description"
|
||||
}
|
||||
},
|
||||
"id": "2ba4acd9-2677-4b25-9379-48433ac5e9cc",
|
||||
"name": "Convert to File10",
|
||||
"type": "n8n-nodes-base.convertToFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1380,
|
||||
2100
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"operation": "fromIcs",
|
||||
"options": {}
|
||||
},
|
||||
"id": "c0179d40-7de0-4e42-a6bf-97c5bb764665",
|
||||
"name": "Extract From File9",
|
||||
"type": "n8n-nodes-base.extractFromFile",
|
||||
"typeVersion": 1,
|
||||
"position": [
|
||||
1600,
|
||||
2100
|
||||
]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"fields": {
|
||||
"values": [
|
||||
{
|
||||
"name": "description",
|
||||
"stringValue": "={{ $json.data.events[0].description }}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"include": "none",
|
||||
"options": {}
|
||||
},
|
||||
"id": "8ee98090-f3b0-41d5-8122-d27e5559738f",
|
||||
"name": "Edit Fields2",
|
||||
"type": "n8n-nodes-base.set",
|
||||
"typeVersion": 3.2,
|
||||
"position": [
|
||||
1820,
|
||||
2100
|
||||
]
|
||||
}
|
||||
],
|
||||
"pinData": {
|
||||
"Extract From File": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": "2",
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": "1",
|
||||
"visits": "1"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File1": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File2": [
|
||||
{
|
||||
"json": {
|
||||
"data": [
|
||||
{
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File3": [
|
||||
{
|
||||
"json": {
|
||||
"data": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File4": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File5": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File7": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File6": [
|
||||
{
|
||||
"json": {
|
||||
"data": "This is text converted to base 64"
|
||||
}
|
||||
}
|
||||
],
|
||||
"Extract From File8": [
|
||||
{
|
||||
"json": {
|
||||
"row_number": 2,
|
||||
"country": "uk",
|
||||
"browser": "firefox",
|
||||
"session_duration": 1,
|
||||
"visits": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"Edit Fields2": [
|
||||
{
|
||||
"json": {
|
||||
"description": "event description"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"connections": {
|
||||
"When clicking \"Execute Workflow\"": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Edit Fields",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Edit Fields1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File10",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Edit Fields": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Convert to File",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File2",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File3",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File5",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File6",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File8",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
},
|
||||
{
|
||||
"node": "Convert to File9",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File1": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File1",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File2": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File2",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File3": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File3",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File5": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File4",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File6": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File5",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File7": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File6",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Edit Fields1": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Convert to File7",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File8": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File7",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File9": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File8",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Convert to File10": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Extract From File9",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"Extract From File9": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Edit Fields2",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"active": false,
|
||||
"settings": {
|
||||
"executionOrder": "v1"
|
||||
},
|
||||
"versionId": "3287cce3-f02f-45df-9d3b-2f116852c1fb",
|
||||
"meta": {
|
||||
"instanceId": "b888bd11cd1ddbb95450babf3e199556799d999b896f650de768b8370ee50363"
|
||||
},
|
||||
"id": "ZzoOtOee7hxaNcmp",
|
||||
"tags": []
|
||||
}
|
|
@ -15,7 +15,7 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"alias": ["SFTP"],
|
||||
"alias": ["SFTP", "FTP", "Binary", "File", "Transfer"],
|
||||
"subcategories": {
|
||||
"Core Nodes": ["Files"]
|
||||
}
|
||||
|
|
|
@ -120,7 +120,7 @@ export class Ftp implements INodeType {
|
|||
group: ['input'],
|
||||
version: 1,
|
||||
subtitle: '={{$parameter["protocol"] + ": " + $parameter["operation"]}}',
|
||||
description: 'Transfers files via FTP or SFTP',
|
||||
description: 'Transfer files via FTP or SFTP',
|
||||
defaults: {
|
||||
name: 'FTP',
|
||||
color: '#303050',
|
||||
|
@ -223,6 +223,7 @@ export class Ftp implements INodeType {
|
|||
type: 'string',
|
||||
default: '',
|
||||
description: 'The file path of the file to delete. Has to contain the full path.',
|
||||
placeholder: 'e.g. /public/documents/file-to-delete.txt',
|
||||
required: true,
|
||||
},
|
||||
|
||||
|
@ -273,12 +274,12 @@ export class Ftp implements INodeType {
|
|||
name: 'path',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: '/documents/invoice.txt',
|
||||
description: 'The file path of the file to download. Has to contain the full path.',
|
||||
placeholder: 'e.g. /public/documents/file-to-download.txt',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['download'],
|
||||
|
@ -287,7 +288,7 @@ export class Ftp implements INodeType {
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
required: true,
|
||||
},
|
||||
|
||||
|
@ -304,6 +305,7 @@ export class Ftp implements INodeType {
|
|||
name: 'oldPath',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. /public/documents/old-file.txt',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
|
@ -316,6 +318,7 @@ export class Ftp implements INodeType {
|
|||
name: 'newPath',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. /public/documents/new-file.txt',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
|
@ -355,10 +358,11 @@ export class Ftp implements INodeType {
|
|||
type: 'string',
|
||||
default: '',
|
||||
description: 'The file path of the file to upload. Has to contain the full path.',
|
||||
placeholder: 'e.g. /public/documents/file-to-upload.txt',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['upload'],
|
||||
|
@ -371,7 +375,7 @@ export class Ftp implements INodeType {
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['upload'],
|
||||
|
@ -381,7 +385,7 @@ export class Ftp implements INodeType {
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
|
@ -411,6 +415,7 @@ export class Ftp implements INodeType {
|
|||
name: 'path',
|
||||
type: 'string',
|
||||
default: '/',
|
||||
placeholder: 'e.g. /public/folder',
|
||||
description: 'Path of directory to list contents of',
|
||||
required: true,
|
||||
},
|
||||
|
|
|
@ -550,7 +550,7 @@ export class Github implements INodeType {
|
|||
// file:create/edit
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -580,7 +580,7 @@ export class Github implements INodeType {
|
|||
description: 'The text content of the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -593,7 +593,7 @@ export class Github implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
{
|
||||
displayName: 'Commit Message',
|
||||
|
@ -699,7 +699,7 @@ export class Github implements INodeType {
|
|||
'Whether to set the data of the file as binary property instead of returning the raw API response',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -712,8 +712,7 @@ export class Github implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property in which to save the binary data of the received file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
{
|
||||
|
|
|
@ -1138,7 +1138,7 @@ export class Gitlab implements INodeType {
|
|||
'Whether to set the data of the file as binary property instead of returning the raw API response',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -1151,8 +1151,7 @@ export class Gitlab implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property in which to save the binary data of the received file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Parameters',
|
||||
|
@ -1184,7 +1183,7 @@ export class Gitlab implements INodeType {
|
|||
// file:create/edit
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -1214,7 +1213,7 @@ export class Gitlab implements INodeType {
|
|||
description: 'The text content of the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -1227,7 +1226,7 @@ export class Gitlab implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
{
|
||||
displayName: 'Commit Message',
|
||||
|
|
|
@ -42,7 +42,7 @@ export const mediaFields: INodeProperties[] = [
|
|||
description: 'Name of the media that is being downloaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -53,6 +53,6 @@ export const mediaFields: INodeProperties[] = [
|
|||
operation: ['download'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
];
|
||||
|
|
|
@ -496,7 +496,7 @@ export const objectFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Use Binary Property',
|
||||
displayName: 'Use Input Binary Field',
|
||||
name: 'createFromBinary',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
|
@ -510,9 +510,10 @@ export const objectFields: INodeProperties[] = [
|
|||
description: 'Whether the data for creating a file should come from a binary field',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'createBinaryPropertyName',
|
||||
type: 'string',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['object'],
|
||||
|
@ -537,9 +538,10 @@ export const objectFields: INodeProperties[] = [
|
|||
description: 'Content of the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['object'],
|
||||
|
|
|
@ -355,7 +355,7 @@ const versionDescription: INodeTypeDescription = {
|
|||
// file:download
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -366,7 +366,7 @@ const versionDescription: INodeTypeDescription = {
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
|
@ -833,7 +833,7 @@ const versionDescription: INodeTypeDescription = {
|
|||
},
|
||||
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -861,7 +861,7 @@ const versionDescription: INodeTypeDescription = {
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -874,8 +874,7 @@ const versionDescription: INodeTypeDescription = {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
|
|
@ -23,13 +23,13 @@ const properties: INodeProperties[] = [
|
|||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
placeholder: 'e.g. data',
|
||||
default: 'data',
|
||||
description: 'Use this field name in the following nodes, to use the binary file data',
|
||||
hint: 'The name of the output field to put the binary file data in',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
{
|
||||
displayName: 'Google File Conversion',
|
||||
|
|
|
@ -341,12 +341,12 @@ export class GoogleSlides implements INodeType {
|
|||
description: 'Name of the binary property to which to write the data of the read page',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['page'],
|
||||
|
|
|
@ -545,10 +545,11 @@ export const channelFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['uploadBanner'],
|
||||
|
|
|
@ -107,10 +107,11 @@ export const videoFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['upload'],
|
||||
|
|
|
@ -46,7 +46,7 @@ export const cameraProxyFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -57,6 +57,6 @@ export const cameraProxyFields: INodeProperties[] = [
|
|||
resource: ['cameraProxy'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
];
|
||||
|
|
|
@ -112,7 +112,7 @@ export class Html implements INodeType {
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
requiresDataPath: 'single',
|
||||
|
@ -124,8 +124,7 @@ export class Html implements INodeType {
|
|||
},
|
||||
default: 'data',
|
||||
required: true,
|
||||
description:
|
||||
'Name of the binary property in which the HTML to extract the data from can be found',
|
||||
hint: 'The name of the input binary field containing the file to be extracted',
|
||||
},
|
||||
{
|
||||
displayName: 'JSON Property',
|
||||
|
|
|
@ -78,7 +78,7 @@ export class HtmlExtract implements INodeType {
|
|||
description: 'If HTML should be read from binary or JSON data',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
|
@ -88,8 +88,7 @@ export class HtmlExtract implements INodeType {
|
|||
},
|
||||
default: 'data',
|
||||
required: true,
|
||||
description:
|
||||
'Name of the binary property in which the HTML to extract the data from can be found',
|
||||
hint: 'The name of the input binary field containing the file to be extracted',
|
||||
},
|
||||
{
|
||||
displayName: 'JSON Property',
|
||||
|
|
|
@ -232,7 +232,7 @@ export class HttpRequestV1 implements INodeType {
|
|||
description: 'Name of the property to which to write the response data',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -242,7 +242,7 @@ export class HttpRequestV1 implements INodeType {
|
|||
responseFormat: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
{
|
||||
|
@ -397,7 +397,7 @@ export class HttpRequestV1 implements INodeType {
|
|||
|
||||
// Body Parameter
|
||||
{
|
||||
displayName: 'Send Binary Data',
|
||||
displayName: 'Send Binary File',
|
||||
name: 'sendBinaryData',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
|
@ -414,7 +414,7 @@ export class HttpRequestV1 implements INodeType {
|
|||
description: 'Whether binary data should be send as body',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -428,8 +428,9 @@ export class HttpRequestV1 implements INodeType {
|
|||
requestMethod: ['PATCH', 'POST', 'PUT'],
|
||||
},
|
||||
},
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded. For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2</code>',
|
||||
'For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2"</code>',
|
||||
},
|
||||
{
|
||||
displayName: 'Body Parameters',
|
||||
|
|
|
@ -247,7 +247,7 @@ export class HttpRequestV2 implements INodeType {
|
|||
description: 'Name of the property to which to write the response data',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'dataPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -257,7 +257,7 @@ export class HttpRequestV2 implements INodeType {
|
|||
responseFormat: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
{
|
||||
|
@ -412,7 +412,7 @@ export class HttpRequestV2 implements INodeType {
|
|||
|
||||
// Body Parameter
|
||||
{
|
||||
displayName: 'Send Binary Data',
|
||||
displayName: 'Send Binary File',
|
||||
name: 'sendBinaryData',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
|
@ -429,7 +429,7 @@ export class HttpRequestV2 implements INodeType {
|
|||
description: 'Whether binary data should be send as body',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -443,8 +443,9 @@ export class HttpRequestV2 implements INodeType {
|
|||
requestMethod: ['PATCH', 'POST', 'PUT'],
|
||||
},
|
||||
},
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded. For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2</code>',
|
||||
'For Form-Data Multipart, they can be provided in the format: <code>"sendKey1:binaryProperty1,sendKey2:binaryProperty2"</code>',
|
||||
},
|
||||
{
|
||||
displayName: 'Body Parameters',
|
||||
|
|
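The 'Input Binary Field' description for HTTP Request v1 and v2 above notes that, for Form-Data Multipart requests, several binary fields can be passed as a single mapping string such as "sendKey1:binaryProperty1,sendKey2:binaryProperty2". Below is a minimal sketch of how such a string could be split into form-field/binary-field pairs; the helper name is illustrative and not part of the node's actual code.

// Illustrative only: turn "sendKey1:binaryProperty1,sendKey2:binaryProperty2"
// into [formField, binaryField] pairs. The node's real parsing may differ.
function parseBinaryFieldMapping(mapping: string): Array<[string, string]> {
	const pairs: Array<[string, string]> = [];
	for (const entry of mapping.split(',')) {
		const [formField = '', binaryField = ''] = entry.split(':');
		if (formField.trim() && binaryField.trim()) {
			pairs.push([formField.trim(), binaryField.trim()]);
		}
	}
	return pairs;
}

// parseBinaryFieldMapping('attachment:data,avatar:image')
// => [['attachment', 'data'], ['avatar', 'image']]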
|
@ -372,7 +372,7 @@ export class HttpRequestV3 implements INodeType {
|
|||
},
|
||||
{
|
||||
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
|
||||
name: 'n8n Binary Data',
|
||||
name: 'n8n Binary File',
|
||||
value: 'binaryData',
|
||||
},
|
||||
{
|
||||
|
@ -502,7 +502,7 @@ export class HttpRequestV3 implements INodeType {
|
|||
options: [
|
||||
{
|
||||
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
|
||||
name: 'n8n Binary Data',
|
||||
name: 'n8n Binary File',
|
||||
value: 'formBinaryData',
|
||||
},
|
||||
{
|
||||
|
|
|
@ -68,7 +68,7 @@ export const profileFields: INodeProperties[] = [
|
|||
description: 'Whether to send a resume for a resume based analysis',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -79,7 +79,7 @@ export const profileFields: INodeProperties[] = [
|
|||
sendResume: [true],
|
||||
},
|
||||
},
|
||||
description: 'The resume in PDF or DOCX format',
|
||||
hint: 'The name of the input binary field containing the resume in PDF or DOCX format',
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
@ -180,7 +180,7 @@ export const profileFields: INodeProperties[] = [
|
|||
description: 'Additional text written by the user',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -191,6 +191,6 @@ export const profileFields: INodeProperties[] = [
|
|||
sendResume: [true],
|
||||
},
|
||||
},
|
||||
description: 'The resume in PDF or DOCX format',
|
||||
hint: 'The name of the input binary field containing the resume in PDF or DOCX format',
|
||||
},
|
||||
];
|
||||
|
|
|
@ -1,21 +1,16 @@
|
|||
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
|
||||
import { promisify } from 'util';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
IDataObject,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import moment from 'moment-timezone';
|
||||
|
||||
import * as ics from 'ics';
|
||||
|
||||
const createEvent = promisify(ics.createEvent);
|
||||
import * as createEvent from './createEvent.operation';
|
||||
|
||||
export class ICalendar implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
hidden: true,
|
||||
displayName: 'iCalendar',
|
||||
name: 'iCal',
|
||||
icon: 'fa:calendar',
|
||||
|
@ -44,330 +39,20 @@ export class ICalendar implements INodeType {
|
|||
],
|
||||
default: 'createEventFile',
|
||||
},
|
||||
{
|
||||
displayName: 'Event Title',
|
||||
name: 'title',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Start',
|
||||
name: 'start',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
required: true,
|
||||
description:
|
||||
'Date and time at which the event begins. (For all-day events, the time will be ignored.).',
|
||||
},
|
||||
{
|
||||
displayName: 'End',
|
||||
name: 'end',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
required: true,
|
||||
description:
|
||||
'Date and time at which the event ends. (For all-day events, the time will be ignored.).',
|
||||
},
|
||||
{
|
||||
displayName: 'All Day',
|
||||
name: 'allDay',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether the event lasts all day or not',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
description: 'The field that your iCalendar file will be available under in the output',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
name: 'additionalFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['createEventFile'],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Attendees',
|
||||
name: 'attendeesUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
},
|
||||
placeholder: 'Add Attendee',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Attendees',
|
||||
name: 'attendeeValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Email',
|
||||
name: 'email',
|
||||
type: 'string',
|
||||
placeholder: 'name@email.com',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'RSVP',
|
||||
name: 'rsvp',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether the attendee has to confirm attendance or not',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Busy Status',
|
||||
name: 'busyStatus',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Busy',
|
||||
value: 'BUSY',
|
||||
},
|
||||
{
|
||||
name: 'Tentative',
|
||||
value: 'TENTATIVE',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
description: 'Used to specify busy status for Microsoft applications, like Outlook',
|
||||
},
|
||||
{
|
||||
displayName: 'Calendar Name',
|
||||
name: 'calName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Specifies the calendar (not event) name. Used by Apple iCal and Microsoft Outlook (<a href="https://docs.microsoft.com/en-us/openspecs/exchange_server_protocols/ms-oxcical/1da58449-b97e-46bd-b018-a1ce576f3e6d">spec</a>).',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The name of the file to be generated. Default value is event.ics.',
|
||||
},
|
||||
{
|
||||
displayName: 'Geolocation',
|
||||
name: 'geolocationUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: false,
|
||||
},
|
||||
placeholder: 'Add Geolocation',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Geolocation',
|
||||
name: 'geolocationValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Latitude',
|
||||
name: 'lat',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Longitude',
|
||||
name: 'lon',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Location',
|
||||
name: 'location',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The intended venue',
|
||||
},
|
||||
{
|
||||
displayName: 'Recurrence Rule',
|
||||
name: 'recurrenceRule',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'A rule to define the repeat pattern of the event (RRULE). (<a href="https://icalendar.org/rrule-tool.html">Rule generator</a>).',
|
||||
},
|
||||
{
|
||||
displayName: 'Organizer',
|
||||
name: 'organizerUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: false,
|
||||
},
|
||||
placeholder: 'Add Organizer',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Organizer',
|
||||
name: 'organizerValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Email',
|
||||
name: 'email',
|
||||
type: 'string',
|
||||
placeholder: 'name@email.com',
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Sequence',
|
||||
name: 'sequence',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description:
|
||||
'When sending an update for an event (with the same uid), defines the revision sequence number',
|
||||
},
|
||||
{
|
||||
displayName: 'Status',
|
||||
name: 'status',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Confirmed',
|
||||
value: 'CONFIRMED',
|
||||
},
|
||||
{
|
||||
name: 'Cancelled',
|
||||
value: 'CANCELLED',
|
||||
},
|
||||
{
|
||||
name: 'Tentative',
|
||||
value: 'TENTATIVE',
|
||||
},
|
||||
],
|
||||
default: 'CONFIRMED',
|
||||
},
|
||||
{
|
||||
displayName: 'UID',
|
||||
name: 'uid',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Universally unique ID for the event (will be auto-generated if not specified here). Should be globally unique.',
|
||||
},
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'URL associated with event',
|
||||
},
|
||||
],
|
||||
},
|
||||
...createEvent.description,
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
async execute(this: IExecuteFunctions) {
|
||||
const items = this.getInputData();
|
||||
const length = items.length;
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
const operation = this.getNodeParameter('operation', 0);
|
||||
|
||||
let returnData: INodeExecutionData[] = [];
|
||||
|
||||
if (operation === 'createEventFile') {
|
||||
for (let i = 0; i < length; i++) {
|
||||
const title = this.getNodeParameter('title', i) as string;
|
||||
const allDay = this.getNodeParameter('allDay', i) as boolean;
|
||||
const start = this.getNodeParameter('start', i) as string;
|
||||
let end = this.getNodeParameter('end', i) as string;
|
||||
end = allDay ? moment(end).utc().add(1, 'day').format() : end;
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
|
||||
const additionalFields = this.getNodeParameter('additionalFields', i);
|
||||
let fileName = 'event.ics';
|
||||
|
||||
const eventStart = moment(start)
|
||||
.toArray()
|
||||
.splice(0, allDay ? 3 : 6) as ics.DateArray;
|
||||
eventStart[1]++;
|
||||
const eventEnd = moment(end)
|
||||
.toArray()
|
||||
.splice(0, allDay ? 3 : 6) as ics.DateArray;
|
||||
eventEnd[1]++;
|
||||
|
||||
if (additionalFields.fileName) {
|
||||
fileName = additionalFields.fileName as string;
|
||||
}
|
||||
|
||||
const data: ics.EventAttributes = {
|
||||
title,
|
||||
start: eventStart,
|
||||
end: eventEnd,
|
||||
startInputType: 'utc',
|
||||
endInputType: 'utc',
|
||||
};
|
||||
|
||||
if (additionalFields.geolocationUi) {
|
||||
data.geo = (additionalFields.geolocationUi as IDataObject)
|
||||
.geolocationValues as ics.GeoCoordinates;
|
||||
delete additionalFields.geolocationUi;
|
||||
}
|
||||
|
||||
if (additionalFields.organizerUi) {
|
||||
data.organizer = (additionalFields.organizerUi as IDataObject)
|
||||
.organizerValues as ics.Person;
|
||||
delete additionalFields.organizerUi;
|
||||
}
|
||||
|
||||
if (additionalFields.attendeesUi) {
|
||||
data.attendees = (additionalFields.attendeesUi as IDataObject)
|
||||
.attendeeValues as ics.Attendee[];
|
||||
delete additionalFields.attendeesUi;
|
||||
}
|
||||
|
||||
Object.assign(data, additionalFields);
|
||||
const buffer = Buffer.from((await createEvent(data)) as string);
|
||||
const binaryData = await this.helpers.prepareBinaryData(buffer, fileName, 'text/calendar');
|
||||
returnData.push({
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem: {
|
||||
item: i,
|
||||
},
|
||||
});
|
||||
}
|
||||
returnData = await createEvent.execute.call(this, items);
|
||||
}
|
||||
|
||||
return [returnData];
|
||||
}
|
||||
}
|
||||
|
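A note on the `eventStart[1]++` / `eventEnd[1]++` lines in the execute code above: `moment(...).toArray()` returns a zero-based month, while the `ics` package's DateArray expects a one-based month, so the second element is bumped by one. A minimal sketch of that conversion (the date is only an example):

import moment from 'moment-timezone';

// toArray() => [year, month, day, hour, minute, second, millisecond] with a ZERO-based month
const parts = moment.utc('2023-11-08T10:30:00Z').toArray().splice(0, 6);
// parts === [2023, 10, 8, 10, 30, 0]   (10 = November when zero-based)
parts[1]++;
// parts === [2023, 11, 8, 10, 30, 0]   (one-based month, as the ics package expects)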
|
376
packages/nodes-base/nodes/ICalendar/createEvent.operation.ts
Normal file
|
@ -0,0 +1,376 @@
|
|||
import {
|
||||
type IExecuteFunctions,
|
||||
type IDataObject,
|
||||
type INodeExecutionData,
|
||||
type INodeProperties,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import moment from 'moment-timezone';
|
||||
import * as ics from 'ics';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const createEvent = promisify(ics.createEvent);
|
||||
|
||||
export const description: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Event Title',
|
||||
name: 'title',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. New Event',
|
||||
},
|
||||
{
|
||||
displayName: 'Start',
|
||||
name: 'start',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
required: true,
|
||||
description:
|
||||
'Date and time at which the event begins. (For all-day events, the time will be ignored.).',
|
||||
validateType: 'dateTime',
|
||||
},
|
||||
{
|
||||
displayName: 'End',
|
||||
name: 'end',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
required: true,
|
||||
description:
|
||||
'Date and time at which the event ends. (For all-day events, the time will be ignored.).',
|
||||
hint: 'If not set, will be equal to the start date',
|
||||
},
|
||||
{
|
||||
displayName: 'All Day',
|
||||
name: 'allDay',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether the event lasts all day or not',
|
||||
},
|
||||
{
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
description: 'The field that your iCalendar file will be available under in the output',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'additionalFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Attendees',
|
||||
name: 'attendeesUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
},
|
||||
placeholder: 'Add Attendee',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Attendees',
|
||||
name: 'attendeeValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Email',
|
||||
name: 'email',
|
||||
type: 'string',
|
||||
placeholder: 'e.g. name@email.com',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'RSVP',
|
||||
name: 'rsvp',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether the attendee has to confirm attendance or not',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Busy Status',
|
||||
name: 'busyStatus',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Busy',
|
||||
value: 'BUSY',
|
||||
},
|
||||
{
|
||||
name: 'Tentative',
|
||||
value: 'TENTATIVE',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
description: 'Used to specify busy status for Microsoft applications, like Outlook',
|
||||
},
|
||||
{
|
||||
displayName: 'Calendar Name',
|
||||
name: 'calName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Specifies the calendar (not event) name. Used by Apple iCal and Microsoft Outlook. <a href="https://docs.microsoft.com/en-us/openspecs/exchange_server_protocols/ms-oxcical/1da58449-b97e-46bd-b018-a1ce576f3e6d">More info</a>.',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. event.ics',
|
||||
description: 'The name of the file to be generated. Default name is event.ics.',
|
||||
},
|
||||
{
|
||||
displayName: 'Geolocation',
|
||||
name: 'geolocationUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: false,
|
||||
},
|
||||
placeholder: 'Add Geolocation',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Geolocation',
|
||||
name: 'geolocationValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Latitude',
|
||||
name: 'lat',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Longitude',
|
||||
name: 'lon',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Location',
|
||||
name: 'location',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The intended venue',
|
||||
},
|
||||
{
|
||||
displayName: 'Recurrence Rule',
|
||||
name: 'recurrenceRule',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'A rule to define the repeat pattern of the event (RRULE). (<a href="https://icalendar.org/rrule-tool.html">Rule generator</a>).',
|
||||
},
|
||||
{
|
||||
displayName: 'Organizer',
|
||||
name: 'organizerUi',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValues: false,
|
||||
},
|
||||
placeholder: 'Add Organizer',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Organizer',
|
||||
name: 'organizerValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Email',
|
||||
name: 'email',
|
||||
type: 'string',
|
||||
placeholder: 'e.g. name@email.com',
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Sequence',
|
||||
name: 'sequence',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description:
|
||||
'When sending an update for an event (with the same uid), defines the revision sequence number',
|
||||
},
|
||||
{
|
||||
displayName: 'Status',
|
||||
name: 'status',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Confirmed',
|
||||
value: 'CONFIRMED',
|
||||
},
|
||||
{
|
||||
name: 'Cancelled',
|
||||
value: 'CANCELLED',
|
||||
},
|
||||
{
|
||||
name: 'Tentative',
|
||||
value: 'TENTATIVE',
|
||||
},
|
||||
],
|
||||
default: 'CONFIRMED',
|
||||
},
|
||||
{
|
||||
displayName: 'UID',
|
||||
name: 'uid',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Universally unique ID for the event (will be auto-generated if not specified here). Should be globally unique.',
|
||||
},
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'URL associated with event',
|
||||
},
|
||||
{
|
||||
displayName: 'Use Workflow Timezone',
|
||||
name: 'useWorkflowTimezone',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: "Whether to use the workflow timezone set in node's settings rather than UTC",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
export async function execute(this: IExecuteFunctions, items: INodeExecutionData[]) {
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
const workflowTimezone = this.getTimezone();
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
try {
|
||||
const title = this.getNodeParameter('title', i) as string;
|
||||
const allDay = this.getNodeParameter('allDay', i) as boolean;
|
||||
|
||||
let start = this.getNodeParameter('start', i) as string;
|
||||
let end = this.getNodeParameter('end', i) as string;
|
||||
|
||||
if (!end) {
|
||||
end = start;
|
||||
}
|
||||
|
||||
end = allDay ? moment(end).utc().add(1, 'day').format() : end;
|
||||
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
|
||||
const options = this.getNodeParameter('additionalFields', i);
|
||||
|
||||
if (options.useWorkflowTimezone) {
|
||||
start = moment(start).tz(workflowTimezone).format();
|
||||
end = moment(end).tz(workflowTimezone).format();
|
||||
delete options.useWorkflowTimezone;
|
||||
}
|
||||
|
||||
let fileName = 'event.ics';
|
||||
|
||||
const eventStart = moment(start)
|
||||
.toArray()
|
||||
.splice(0, allDay ? 3 : 6) as ics.DateArray;
|
||||
eventStart[1]++;
|
||||
|
||||
const eventEnd = moment(end)
|
||||
.toArray()
|
||||
.splice(0, allDay ? 3 : 6) as ics.DateArray;
|
||||
eventEnd[1]++;
|
||||
|
||||
if (options.fileName) {
|
||||
fileName = options.fileName as string;
|
||||
}
|
||||
|
||||
const data: ics.EventAttributes = {
|
||||
title,
|
||||
start: eventStart,
|
||||
end: eventEnd,
|
||||
startInputType: 'utc',
|
||||
endInputType: 'utc',
|
||||
};
|
||||
|
||||
if (options.geolocationUi) {
|
||||
data.geo = (options.geolocationUi as IDataObject).geolocationValues as ics.GeoCoordinates;
|
||||
delete options.geolocationUi;
|
||||
}
|
||||
|
||||
if (options.organizerUi) {
|
||||
data.organizer = (options.organizerUi as IDataObject).organizerValues as ics.Person;
|
||||
delete options.organizerUi;
|
||||
}
|
||||
|
||||
if (options.attendeesUi) {
|
||||
data.attendees = (options.attendeesUi as IDataObject).attendeeValues as ics.Attendee[];
|
||||
delete options.attendeesUi;
|
||||
}
|
||||
|
||||
Object.assign(data, options);
|
||||
const buffer = Buffer.from((await createEvent(data)) as string);
|
||||
const binaryData = await this.helpers.prepareBinaryData(buffer, fileName, 'text/calendar');
|
||||
returnData.push({
|
||||
json: {},
|
||||
binary: {
|
||||
[binaryPropertyName]: binaryData,
|
||||
},
|
||||
pairedItem: {
|
||||
item: i,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const errorDescription = error.description;
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
error: error.message,
|
||||
},
|
||||
pairedItem: {
|
||||
item: i,
|
||||
},
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error, {
|
||||
itemIndex: i,
|
||||
description: errorDescription,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
|
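The new createEvent.operation.ts above follows the operation-module pattern this commit introduces: the module exports a `description` array of node properties plus an `execute` function, and the node spreads the former into its own properties and delegates to the latter (the ICalendar diff does exactly this with `...createEvent.description` and `createEvent.execute.call(this, items)`). Here is a minimal, generic sketch of a node consuming such a module; the node name and metadata are placeholders, not part of the actual commit.

// Minimal sketch of the operation-module pattern used above; only the shape mirrors the diff.
import type {
	IExecuteFunctions,
	INodeExecutionData,
	INodeType,
	INodeTypeDescription,
} from 'n8n-workflow';

import * as createEvent from './createEvent.operation';

export class ExampleNode implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Example',
		name: 'example',
		group: ['transform'],
		version: 1,
		description: 'Sketch of a node built from an operation module',
		defaults: { name: 'Example' },
		inputs: ['main'],
		outputs: ['main'],
		properties: [
			// ...operation selector would go here...
			...createEvent.description, // reuse the operation module's own parameters
		],
	};

	async execute(this: IExecuteFunctions) {
		const items = this.getInputData();
		let returnData: INodeExecutionData[] = [];
		const operation = this.getNodeParameter('operation', 0);

		if (operation === 'createEventFile') {
			// delegate to the operation module, keeping `this` so node helpers stay available
			returnData = await createEvent.execute.call(this, items);
		}

		return [returnData];
	}
}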
@ -59,7 +59,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['issueAttachment'],
|
||||
|
@ -69,7 +69,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
required: true,
|
||||
},
|
||||
|
||||
|
@ -104,7 +104,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -115,7 +115,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
download: [true],
|
||||
},
|
||||
},
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
required: true,
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
@ -179,7 +179,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -190,7 +190,7 @@ export const issueAttachmentFields: INodeProperties[] = [
|
|||
download: [true],
|
||||
},
|
||||
},
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
required: true,
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
|
|
@ -40,7 +40,7 @@ export const fileFields: INodeProperties[] = [
|
|||
/* file:upload */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -53,7 +53,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'Whether the data to upload should be taken from binary field',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -65,7 +65,7 @@ export const fileFields: INodeProperties[] = [
|
|||
binaryData: [true],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'File Association',
|
||||
|
|
|
@ -121,7 +121,7 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
options: [
|
||||
{
|
||||
name: 'Binary Data',
|
||||
name: 'Binary File',
|
||||
value: 'binary',
|
||||
},
|
||||
{
|
||||
|
|
|
@ -68,7 +68,7 @@ export const notificationFields: INodeProperties[] = [
|
|||
displayName: 'Image',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -98,7 +98,7 @@ export const notificationFields: INodeProperties[] = [
|
|||
description: 'HTTP/HTTPS URL. Maximum size of 240×240px JPEG.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
|
@ -107,7 +107,7 @@ export const notificationFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
default: 'data',
|
||||
description: 'Name of the property that holds the binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -121,7 +121,7 @@ export const postFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: ['create'],
|
||||
|
@ -132,7 +132,7 @@ export const postFields: INodeProperties[] = [
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
|
@ -173,11 +173,11 @@ export const postFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Thumbnail Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'thumbnailBinaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data for the article thumbnail',
|
||||
hint: 'The name of the input binary field containing the file for the article thumbnail',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/shareMediaCategory': ['ARTICLE'],
|
||||
|
|
|
@ -46,11 +46,12 @@ export const mediaFields: INodeProperties[] = [
|
|||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
required: true,
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['media'],
|
||||
|
|
|
@ -27,7 +27,7 @@ export const versionDescription: INodeTypeDescription = {
|
|||
properties: [
|
||||
{
|
||||
displayName:
|
||||
'This node connects to the Microsoft 365 cloud platform. Use the \'Spreadsheet File\' node to directly manipulate spreadsheet files (.xls, .csv, etc). <a href="/templates/890" target="_blank">More info</a>.',
|
||||
'This node connects to the Microsoft 365 cloud platform. Use the \'Extract From File\' and \'Convert to File\' nodes to directly manipulate spreadsheet files (.xls, .csv, etc). <a href="/templates/890" target="_blank">More info</a>.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
|
|
|
@ -209,7 +209,7 @@ export const fileFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -220,7 +220,7 @@ export const fileFields: INodeProperties[] = [
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* file:get */
|
||||
|
@ -380,7 +380,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'ID of the parent folder that will contain the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -410,7 +410,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The text content of the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -423,6 +423,6 @@ export const fileFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
];
|
||||
|
|
|
@ -106,9 +106,9 @@ export const messageAttachmentFields: INodeProperties[] = [
|
|||
|
||||
// File operations
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
|
|
|
@ -559,9 +559,9 @@ export const messageFields: INodeProperties[] = [
|
|||
|
||||
// File operations
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
|
|
|
@ -156,7 +156,7 @@ export class Mindee implements INodeType {
|
|||
default: 'predict',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -167,8 +167,7 @@ export class Mindee implements INodeType {
|
|||
resource: ['receipt', 'invoice'],
|
||||
},
|
||||
},
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'RAW Data',
|
||||
|
|
|
@ -42,6 +42,7 @@ encodeDecodeOptions.sort((a, b) => {
|
|||
|
||||
export class MoveBinaryData implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
hidden: true,
|
||||
displayName: 'Convert to/from binary data',
|
||||
name: 'moveBinaryData',
|
||||
icon: 'fa:exchange-alt',
|
||||
|
@ -179,6 +180,20 @@ export class MoveBinaryData implements INodeType {
|
|||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Add Byte Order Mark (BOM)',
|
||||
name: 'addBOM',
|
||||
description:
|
||||
'Whether to add a special marker at the start of your text file. This marker helps some programs understand how to read the file correctly.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/mode': ['jsonToBinary'],
|
||||
encoding: bomAware,
|
||||
},
|
||||
},
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
displayName: 'Data Is Base64',
|
||||
name: 'dataIsBase64',
|
||||
|
@ -206,7 +221,7 @@ export class MoveBinaryData implements INodeType {
|
|||
},
|
||||
},
|
||||
default: 'utf8',
|
||||
description: 'Set the encoding of the data stream',
|
||||
description: 'Choose the character set to use to encode the data',
|
||||
},
|
||||
{
|
||||
displayName: 'Strip BOM',
|
||||
|
@ -220,18 +235,6 @@ export class MoveBinaryData implements INodeType {
|
|||
type: 'boolean',
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Add BOM',
|
||||
name: 'addBOM',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/mode': ['jsonToBinary'],
|
||||
encoding: bomAware,
|
||||
},
|
||||
},
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
|
|
|
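The 'Add Byte Order Mark (BOM)' option added above says the marker helps programs detect how to read the file; for BOM-aware encodings such as UTF-8 that marker is the byte sequence EF BB BF at the very start of the file. A minimal sketch of what the option conceptually does, not the node's actual implementation (which leaves BOM handling to its encoding library):

// Illustrative only: prepend a UTF-8 byte order mark to file content.
const UTF8_BOM = Buffer.from([0xef, 0xbb, 0xbf]);

function withUtf8Bom(content: string): Buffer {
	return Buffer.concat([UTF8_BOM, Buffer.from(content, 'utf8')]);
}

// withUtf8Bom('hello').subarray(0, 3) => <Buffer ef bb bf>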
@ -570,7 +570,7 @@ export class Nasa implements INodeType {
|
|||
'By default just the URL of the image is returned. When set to true the image will be downloaded.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -582,7 +582,7 @@ export class Nasa implements INodeType {
|
|||
download: [true],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
/* date for astronomyPictureOfTheDay */
|
||||
|
@ -766,7 +766,7 @@ export class Nasa implements INodeType {
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -777,7 +777,7 @@ export class Nasa implements INodeType {
|
|||
resource: ['earthImagery'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
//aqui
|
||||
|
|
|
@ -350,7 +350,7 @@ export class NextCloud implements INodeType {
|
|||
'The file path of the file to download. Has to contain the full path. The path should start with "/".',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -361,7 +361,7 @@ export class NextCloud implements INodeType {
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -384,7 +384,7 @@ export class NextCloud implements INodeType {
|
|||
'The absolute file path of the file to upload. Has to contain the full path. The parent folder has to exist. Existing files get overwritten.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Data',
|
||||
displayName: 'Binary File',
|
||||
name: 'binaryDataUpload',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
@ -412,7 +412,7 @@ export class NextCloud implements INodeType {
|
|||
description: 'The text content of the file to upload',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -425,8 +425,7 @@ export class NextCloud implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
|
|
@ -495,7 +495,7 @@ export const operationFields: INodeProperties[] = [
|
|||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Is Binary Data',
|
||||
displayName: 'Is Binary File',
|
||||
name: 'binaryData',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
|
|
|
@ -137,7 +137,7 @@ const createOperations: INodeProperties[] = [
|
|||
},
|
||||
options: [
|
||||
{
|
||||
name: 'Binary Data',
|
||||
name: 'Binary File',
|
||||
value: 'binaryData',
|
||||
},
|
||||
{
|
||||
|
|
|
@ -1890,7 +1890,7 @@ export class Pipedrive implements INodeType {
|
|||
// file:create
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -1902,8 +1902,7 @@ export class Pipedrive implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be created',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
@ -1996,7 +1995,7 @@ export class Pipedrive implements INodeType {
|
|||
description: 'ID of the file to download',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -2007,8 +2006,7 @@ export class Pipedrive implements INodeType {
|
|||
resource: ['file'],
|
||||
},
|
||||
},
|
||||
description:
|
||||
'Name of the binary property to which to write the data of the downloaded file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
|
|
@ -156,7 +156,7 @@ export class Pushbullet implements INodeType {
|
|||
description: 'URL of the push',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -169,8 +169,7 @@ export class Pushbullet implements INodeType {
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be created',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
},
|
||||
{
|
||||
displayName: 'Target',
|
||||
|
|
|
@ -196,13 +196,12 @@ export class Pushover implements INodeType {
|
|||
displayName: 'Attachment Property',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'data',
|
||||
description:
|
||||
'Name of the binary properties which contain data which should be added to email as attachment',
|
||||
hint: 'The name of the input binary field containing the file which should be added to email as attachment',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -90,7 +90,7 @@ export const fileFields: INodeProperties[] = [
|
|||
description: 'The file attachment version number',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['file'],
|
||||
|
@ -100,7 +100,7 @@ export const fileFields: INodeProperties[] = [
|
|||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Object property name which holds binary data',
|
||||
hint: 'The name of the input binary field containing the file to be written',
|
||||
required: true,
|
||||
},
|
||||
];
|
||||
|
|
|
@ -210,12 +210,12 @@ export const estimateFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['estimate'],
|
||||
|
|
|
@ -215,12 +215,12 @@ export const invoiceFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['invoice'],
|
||||
|
|
|
@ -154,12 +154,12 @@ export const paymentFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryProperty',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'data',
|
||||
description: 'Name of the binary property to which to write to',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['payment'],
|
||||
|
|
|
@ -10,6 +10,7 @@ import { generatePairedItemData } from '../../utils/utilities';
|
|||
|
||||
export class ReadBinaryFiles implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
hidden: true,
|
||||
displayName: 'Read Binary Files',
|
||||
name: 'readBinaryFiles',
|
||||
icon: 'fa:file-import',
|
||||
|
|
|
@ -1,35 +1,15 @@
|
|||
import {
|
||||
BINARY_ENCODING,
|
||||
NodeOperationError,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { getDocument as readPDF, version as pdfJsVersion } from 'pdfjs-dist';
|
||||
|
||||
type Document = Awaited<ReturnType<Awaited<typeof readPDF>>['promise']>;
|
||||
type Page = Awaited<ReturnType<Awaited<Document['getPage']>>>;
|
||||
type TextContent = Awaited<ReturnType<Page['getTextContent']>>;
|
||||
|
||||
const parseText = (textContent: TextContent) => {
|
||||
let lastY = undefined;
|
||||
const text = [];
|
||||
for (const item of textContent.items) {
|
||||
if ('str' in item) {
|
||||
if (lastY == item.transform[5] || !lastY) {
|
||||
text.push(item.str);
|
||||
} else {
|
||||
text.push(`\n${item.str}`);
|
||||
}
|
||||
lastY = item.transform[5];
|
||||
}
|
||||
}
|
||||
return text.join('');
|
||||
};
|
||||
import { extractDataFromPDF } from '@utils/binary';
|
||||
|
||||
export class ReadPDF implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
hidden: true,
|
||||
displayName: 'Read PDF',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'readPDF',
|
||||
|
@ -45,7 +25,7 @@ export class ReadPDF implements INodeType {
|
|||
outputs: ['main'],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -84,46 +64,26 @@ export class ReadPDF implements INodeType {
|
|||
for (let itemIndex = 0; itemIndex < length; itemIndex++) {
|
||||
try {
|
||||
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex);
|
||||
const binaryData = this.helpers.assertBinaryData(itemIndex, binaryPropertyName);
|
||||
|
||||
const params: { password?: string; url?: URL; data?: ArrayBuffer } = {};
|
||||
|
||||
let password;
|
||||
if (this.getNodeParameter('encrypted', itemIndex) === true) {
|
||||
params.password = this.getNodeParameter('password', itemIndex) as string;
|
||||
password = this.getNodeParameter('password', itemIndex) as string;
|
||||
}
|
||||
|
||||
if (binaryData.id) {
|
||||
const binaryPath = this.helpers.getBinaryPath(binaryData.id);
|
||||
params.url = new URL(`file://${binaryPath}`);
|
||||
} else {
|
||||
params.data = Buffer.from(binaryData.data, BINARY_ENCODING).buffer;
|
||||
}
|
||||
|
||||
const document = await readPDF(params).promise;
|
||||
const { info, metadata } = await document
|
||||
.getMetadata()
|
||||
.catch(() => ({ info: null, metadata: null }));
|
||||
|
||||
const pages = [];
|
||||
for (let i = 1; i <= document.numPages; i++) {
|
||||
const page = await document.getPage(i);
|
||||
const text = await page.getTextContent().then(parseText);
|
||||
pages.push(text);
|
||||
}
|
||||
const json = await extractDataFromPDF.call(
|
||||
this,
|
||||
binaryPropertyName,
|
||||
password,
|
||||
undefined,
|
||||
undefined,
|
||||
itemIndex,
|
||||
);
|
||||
|
||||
returnData.push({
|
||||
binary: items[itemIndex].binary,
|
||||
json: {
|
||||
numpages: document.numPages,
|
||||
numrender: document.numPages,
|
||||
info,
|
||||
metadata: metadata?.getAll(),
|
||||
text: pages.join('\n\n'),
|
||||
version: pdfJsVersion,
|
||||
},
|
||||
json,
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({
|
||||
json: {
|
||||
|
@ -135,7 +95,7 @@ export class ReadPDF implements INodeType {
|
|||
});
|
||||
continue;
|
||||
}
|
||||
throw error;
|
||||
throw new NodeOperationError(this.getNode(), error, { itemIndex });
|
||||
}
|
||||
}
|
||||
return [returnData];
|
||||
|
|
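Both the new createEvent.operation.ts and the reworked ReadPDF execute loop above share the same per-item error pattern: with 'Continue On Fail' enabled the error becomes a JSON item paired to its input, otherwise it is rethrown as a NodeOperationError carrying the item index. A minimal sketch of that loop shape, with the real work replaced by a stand-in:

import { NodeOperationError, type IExecuteFunctions, type INodeExecutionData } from 'n8n-workflow';

// Stand-in for the node's real per-item logic.
const doWork = async (item: INodeExecutionData) => item.json;

export async function processItems(this: IExecuteFunctions, items: INodeExecutionData[]) {
	const returnData: INodeExecutionData[] = [];

	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
		try {
			const json = await doWork(items[itemIndex]);
			returnData.push({ json, pairedItem: { item: itemIndex } });
		} catch (error) {
			if (this.continueOnFail()) {
				// record the failure on the matching output item and keep going
				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
				continue;
			}
			// otherwise surface the failure together with the item it belongs to
			throw new NodeOperationError(this.getNode(), error, { itemIndex });
		}
	}

	return returnData;
}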
|
@ -86,7 +86,7 @@ export const attachmentFields: INodeProperties[] = [
|
|||
'Required. Name of the attached file. Maximum size is 255 characters. Label is File Name.',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -98,7 +98,7 @@ export const attachmentFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
@ -173,13 +173,12 @@ export const attachmentFields: INodeProperties[] = [
|
|||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
placeholder: '',
|
||||
description:
|
||||
'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
|
|
|
@ -42,7 +42,7 @@ export const documentFields: INodeProperties[] = [
|
|||
description: 'Name of the file',
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Input Binary Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
|
@ -54,7 +54,7 @@ export const documentFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
placeholder: '',
|
||||
description: 'Name of the binary property which contains the data for the file to be uploaded',
|
||||
hint: 'The name of the input binary field containing the file to be uploaded',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
|
|
@ -299,7 +299,7 @@ export const reportFields: INodeProperties[] = [
|
|||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Binary Property',
|
||||
displayName: 'Put Output File in Field',
|
||||
name: 'binaryPropertyName',
|
||||
type: 'string',
|
||||
required: true,
|
||||
|
@ -310,6 +310,6 @@ export const reportFields: INodeProperties[] = [
|
|||
operation: ['download'],
|
||||
},
|
||||
},
|
||||
description: 'Name of the binary property to which to write the data of the read file',
|
||||
hint: 'The name of the output binary field to put the file in',
|
||||
},
|
||||
];
|
||||
|
|
|
@ -121,7 +121,7 @@
}
]
},
"alias": ["Set", "JSON", "Filter", "Transform", "Map"],
"alias": ["Set", "JS", "JSON", "Filter", "Transform", "Map"],
"subcategories": {
"Core Nodes": ["Data Transformation"]
}

@ -157,7 +157,7 @@ const versionDescription: INodeTypeDescription = {
default: {},
options: [
{
displayName: 'Include Binary Data',
displayName: 'Include Binary File',
name: 'includeBinary',
type: 'boolean',
default: true,

@ -40,7 +40,7 @@ export const fileFields: INodeProperties[] = [
/* file:upload */
/* -------------------------------------------------------------------------- */
{
displayName: 'Binary Data',
displayName: 'Binary File',
name: 'binaryData',
type: 'boolean',
default: false,

@ -68,7 +68,7 @@ export const fileFields: INodeProperties[] = [
description: 'The text content of the file to upload',
},
{
displayName: 'Binary Property',
displayName: 'Input Binary Field',
name: 'binaryPropertyName',
type: 'string',
default: 'data',

@ -81,7 +81,7 @@ export const fileFields: INodeProperties[] = [
},
},
placeholder: '',
description: 'Name of the binary property which contains the data for the file to be uploaded',
hint: 'The name of the input binary field containing the file to be uploaded',
},
{
displayName: 'Options',

@ -39,7 +39,7 @@ export const fileFields: INodeProperties[] = [
/* file:upload */
/* -------------------------------------------------------------------------- */
{
displayName: 'Binary Data',
displayName: 'Binary File',
name: 'binaryData',
type: 'boolean',
default: false,

@ -66,7 +66,7 @@ export const fileFields: INodeProperties[] = [
placeholder: '',
},
{
displayName: 'Binary Property',
displayName: 'File Property',
name: 'binaryPropertyName',
type: 'string',
default: 'data',

@ -7,6 +7,7 @@ import { SpreadsheetFileV2 } from './v2/SpreadsheetFileV2.node';
export class SpreadsheetFile extends VersionedNodeType {
constructor() {
const baseDescription: INodeTypeBaseDescription = {
hidden: true,
displayName: 'Spreadsheet File',
name: 'spreadsheetFile',
icon: 'fa:table',
@ -1,97 +1,41 @@
import type { INodeProperties } from 'n8n-workflow';

export const operationProperties: INodeProperties[] = [
{
displayName: 'Operation',
name: 'operation',
type: 'options',
noDataExpression: true,
options: [
{
name: 'Read From File',
value: 'fromFile',
description: 'Reads data from a spreadsheet file',
action: 'Read data from a spreadsheet file',
},
{
name: 'Write to File',
value: 'toFile',
description: 'Writes the workflow data to a spreadsheet file',
action: 'Write data to a spreadsheet file',
},
],
default: 'fromFile',
},
];

export const fromFileProperties: INodeProperties[] = [
{
displayName: 'Binary Property',
name: 'binaryPropertyName',
type: 'string',
default: 'data',
required: true,
displayOptions: {
show: {
operation: ['fromFile'],
},
export const operationProperty: INodeProperties = {
displayName: 'Operation',
name: 'operation',
type: 'options',
noDataExpression: true,
options: [
{
name: 'Read From File',
value: 'fromFile',
description: 'Reads data from a spreadsheet file',
action: 'Read data from a spreadsheet file',
},
placeholder: '',
description:
'Name of the binary property from which to read the binary data of the spreadsheet file',
},
];

export const fromFileV2Properties: INodeProperties[] = [
{
displayName: 'File Format',
name: 'fileFormat',
type: 'options',
options: [
{
name: 'Autodetect',
value: 'autodetect',
},
{
name: 'CSV',
value: 'csv',
description: 'Comma-separated values',
},
{
name: 'HTML',
value: 'html',
description: 'HTML Table',
},
{
name: 'ODS',
value: 'ods',
description: 'OpenDocument Spreadsheet',
},
{
name: 'RTF',
value: 'rtf',
description: 'Rich Text Format',
},
{
name: 'XLS',
value: 'xls',
description: 'Excel',
},
{
name: 'XLSX',
value: 'xlsx',
description: 'Excel',
},
],
default: 'autodetect',
displayOptions: {
show: {
operation: ['fromFile'],
},
{
name: 'Write to File',
value: 'toFile',
description: 'Writes the workflow data to a spreadsheet file',
action: 'Write data to a spreadsheet file',
},
],
default: 'fromFile',
};

export const binaryProperty: INodeProperties = {
displayName: 'Input Binary Field',
name: 'binaryPropertyName',
type: 'string',
default: 'data',
required: true,
placeholder: '',
hint: 'The name of the input field containing the file data to be processed',
displayOptions: {
show: {
operation: ['fromFile'],
},
description: 'The format of the binary data to read from',
},
];
};

export const toFileProperties: INodeProperties[] = [
{

@ -139,7 +83,7 @@ export const toFileProperties: INodeProperties[] = [
description: 'The format of the file to save the data as',
},
{
displayName: 'Binary Property',
displayName: 'Put Output File in Field',
name: 'binaryPropertyName',
type: 'string',
default: 'data',

@ -150,189 +94,176 @@ export const toFileProperties: INodeProperties[] = [
},
},
placeholder: '',
description:
'Name of the binary property in which to save the binary data of the spreadsheet file',
hint: 'The name of the output binary field to put the file in',
},
];

export const optionsProperties: INodeProperties[] = [
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
options: [
{
displayName: 'Compression',
name: 'compression',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['toFile'],
'/fileFormat': ['xlsx', 'ods'],
},
},
default: false,
description: 'Whether compression will be applied or not',
},
{
displayName: 'File Name',
name: 'fileName',
type: 'string',
displayOptions: {
show: {
'/operation': ['toFile'],
},
},
default: '',
description:
'File name to set in binary data. By default will "spreadsheet.<fileFormat>" be used.',
},
{
displayName: 'Header Row',
name: 'headerRow',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['fromFile', 'toFile'],
},
},
default: true,
description: 'Whether the first row of the file contains the header names',
},
{
displayName: 'Delimiter',
name: 'delimiter',
type: 'string',
displayOptions: {
show: {
'/operation': ['fromFile'],
'/fileFormat': ['csv'],
},
},
default: ',',
description: 'Set the field delimiter',
},
{
displayName: 'Starting Line',
name: 'fromLine',
type: 'number',
displayOptions: {
show: {
'/operation': ['fromFile'],
'/fileFormat': ['csv'],
},
},
default: 0,
description: 'Start handling records from the requested line number',
},
{
displayName: 'Max Number of Rows to Load',
name: 'maxRowCount',
type: 'number',
displayOptions: {
show: {
'/operation': ['fromFile'],
'/fileFormat': ['csv'],
},
},
default: -1,
description: 'Stop handling records after the requested number of rows are read',
},
{
displayName: 'Exclude Byte Order Mark (BOM)',
name: 'enableBOM',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['fromFile'],
'/fileFormat': ['csv'],
},
},
default: false,
description:
'Whether to detect and exclude the byte-order-mark from the CSV Input if present',
},
{
displayName: 'Include Empty Cells',
name: 'includeEmptyCells',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['fromFile'],
},
},
default: false,
// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
description:
'When reading from file the empty cells will be filled with an empty string in the JSON',
},
{
displayName: 'RAW Data',
name: 'rawData',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['fromFile'],
},
},
default: false,
description: 'Whether the data should be returned RAW instead of parsed',
},
{
displayName: 'Read As String',
name: 'readAsString',
type: 'boolean',
displayOptions: {
show: {
'/operation': ['fromFile'],
},
},
default: false,
// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
description:
'In some cases and file formats, it is necessary to read specifically as string else some special character get interpreted wrong',
},
{
displayName: 'Range',
name: 'range',
type: 'string',
displayOptions: {
show: {
'/operation': ['fromFile'],
},
},
default: '',
description:
'The range to read from the table. If set to a number it will be the starting row. If set to string it will be used as A1-style bounded range.',
},
{
displayName: 'Sheet Name',
name: 'sheetName',
type: 'string',
displayOptions: {
show: {
'/operation': ['fromFile'],
},
},
default: 'Sheet',
description:
'Name of the sheet to read from in the spreadsheet (if supported). If not set, the first one gets chosen.',
},
{
displayName: 'Sheet Name',
name: 'sheetName',
type: 'string',
displayOptions: {
show: {
'/operation': ['toFile'],
'/fileFormat': ['ods', 'xls', 'xlsx'],
},
},
default: 'Sheet',
description: 'Name of the sheet to create in the spreadsheet',
},
],
export const toFileOptions: INodeProperties = {
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
displayOptions: {
show: {
operation: ['toFile'],
},
},
];
options: [
{
displayName: 'Compression',
name: 'compression',
type: 'boolean',
displayOptions: {
show: {
'/fileFormat': ['xlsx', 'ods'],
},
},
default: false,
description: 'Whether compression will be applied or not',
},
{
displayName: 'File Name',
name: 'fileName',
type: 'string',
default: '',
description:
'File name to set in binary data. By default will "spreadsheet.<fileFormat>" be used.',
},
{
displayName: 'Header Row',
name: 'headerRow',
type: 'boolean',
default: true,
description: 'Whether the first row of the file contains the header names',
},
{
displayName: 'Sheet Name',
name: 'sheetName',
type: 'string',
displayOptions: {
show: {
'/fileFormat': ['ods', 'xls', 'xlsx'],
},
},
default: 'Sheet',
description: 'Name of the sheet to create in the spreadsheet',
},
],
};

export const fromFileOptions: INodeProperties = {
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
displayOptions: {
show: {
operation: ['fromFile'],
},
},
options: [
{
displayName: 'Delimiter',
name: 'delimiter',
type: 'string',
displayOptions: {
show: {
'/fileFormat': ['csv'],
},
},
default: ',',
placeholder: 'e.g. ,',
description: 'Set the field delimiter, usually a comma',
},
{
displayName: 'Exclude Byte Order Mark (BOM)',
name: 'enableBOM',
type: 'boolean',
displayOptions: {
show: {
'/fileFormat': ['csv'],
},
},
default: false,
description:
'Whether to detect and exclude the byte-order-mark from the CSV Input if present',
},
{
displayName: 'Header Row',
name: 'headerRow',
type: 'boolean',
default: true,
description: 'Whether the first row of the file contains the header names',
},
{
displayName: 'Include Empty Cells',
name: 'includeEmptyCells',
type: 'boolean',
default: false,
description:
'Whether to include empty cells when reading from file. They will be filled with an empty string.',
},
{
displayName: 'Max Number of Rows to Load',
name: 'maxRowCount',
type: 'number',
displayOptions: {
show: {
'/fileFormat': ['csv'],
},
},
default: -1,
placeholder: 'e.g. 10',
description:
'Stop handling records after the requested number of rows are read. Use -1 if you want to load all rows.',
},
{
displayName: 'Range',
name: 'range',
type: 'string',
default: '',
description:
'The range to read from the table. If set to a number it will be the starting row. If set to string it will be used as A1-style notation range.',
},
{
displayName: 'RAW Data',
name: 'rawData',
type: 'boolean',
default: false,
description: 'Whether to return RAW data, instead of parsing it',
},
{
displayName: 'Read As String',
name: 'readAsString',
type: 'boolean',
default: false,
// eslint-disable-next-line n8n-nodes-base/node-param-description-boolean-without-whether
description:
'In some cases and file formats, it is necessary to read as string to ensure special characters are interpreted correctly',
},
{
displayName: 'Sheet Name',
name: 'sheetName',
type: 'string',
default: 'Sheet',
placeholder: 'e.g. mySheet',
description:
'Name of the sheet to read from in the spreadsheet (if supported). If not set, the first one will be chosen.',
},
{
displayName: 'Starting Line',
name: 'fromLine',
type: 'number',
displayOptions: {
show: {
'/fileFormat': ['csv'],
},
},
default: 0,
placeholder: 'e.g. 0',
description: 'Start handling records from the requested line number. Starts at 0.',
},
],
};
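The read options above line up with the csv-parse configuration used further down in this diff by the fromFile operation. As a rough illustration of how user-facing option values end up as parser settings (the option values here are examples only):

import { parse as createCSVParser } from 'csv-parse';

// Example values a user might set in the node's Options collection.
const options = { delimiter: ';', fromLine: 0, enableBOM: true, maxRowCount: 100, headerRow: true };

// Simplified version of how the fromFile operation passes them to csv-parse.
const parser = createCSVParser({
  delimiter: options.delimiter,
  fromLine: options.fromLine,
  bom: options.enableBOM,
  to: options.maxRowCount > -1 ? options.maxRowCount : undefined, // -1 means "load all rows"
  columns: options.headerRow !== false, // use the first row as column names
});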
@ -23,10 +23,11 @@ import {
} from 'xlsx';

import {
operationProperties,
fromFileProperties,
operationProperty,
binaryProperty,
toFileProperties,
optionsProperties,
fromFileOptions,
toFileOptions,
} from '../description';
import { flattenObject, generatePairedItemData } from '@utils/utilities';
import { oldVersionNotice } from '@utils/descriptions';

@ -46,10 +47,11 @@ export class SpreadsheetFileV1 implements INodeType {
outputs: ['main'],
properties: [
oldVersionNotice,
...operationProperties,
...fromFileProperties,
operationProperty,
binaryProperty,
...toFileProperties,
...optionsProperties,
fromFileOptions,
toFileOptions,
],
};
}

@ -1,37 +1,14 @@
import type {
IDataObject,
IExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeBaseDescription,
INodeTypeDescription,
} from 'n8n-workflow';
import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow';

import type {
JSON2SheetOpts,
Sheet2JSONOpts,
WorkBook,
WritingOptions,
ParsingOptions,
} from 'xlsx';

import {
read as xlsxRead,
readFile as xlsxReadFile,
utils as xlsxUtils,
write as xlsxWrite,
} from 'xlsx';
import { parse as createCSVParser } from 'csv-parse';

import {
operationProperties,
fromFileProperties,
toFileProperties,
optionsProperties,
fromFileV2Properties,
} from '../description';
import { flattenObject, generatePairedItemData } from '@utils/utilities';
import { operationProperty } from '../description';
import * as fromFile from './fromFile.operation';
import * as toFile from './toFile.operation';

export class SpreadsheetFileV2 implements INodeType {
description: INodeTypeDescription;

@ -46,271 +23,23 @@ export class SpreadsheetFileV2 implements INodeType {
},
inputs: ['main'],
outputs: ['main'],
properties: [
...operationProperties,
...fromFileProperties,
...fromFileV2Properties,
...toFileProperties,
...optionsProperties,
],
properties: [operationProperty, ...fromFile.description, ...toFile.description],
};
}

async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions) {
const items = this.getInputData();

const operation = this.getNodeParameter('operation', 0);

const newItems: INodeExecutionData[] = [];
let returnData: INodeExecutionData[] = [];

if (operation === 'fromFile') {
// Read data from spreadsheet file to workflow
for (let i = 0; i < items.length; i++) {
try {
const options = this.getNodeParameter('options', i, {});
let fileFormat = this.getNodeParameter('fileFormat', i, {});
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);

let rows: unknown[] = [];

if (
fileFormat === 'autodetect' &&
(binaryData.mimeType === 'text/csv' ||
(binaryData.mimeType === 'text/plain' && binaryData.fileExtension === 'csv'))
) {
fileFormat = 'csv';
}

if (fileFormat === 'csv') {
const maxRowCount = options.maxRowCount as number;
const parser = createCSVParser({
delimiter: options.delimiter as string,
fromLine: options.fromLine as number,
bom: options.enableBOM as boolean,
to: maxRowCount > -1 ? maxRowCount : undefined,
columns: options.headerRow !== false,
onRecord: (record) => {
if (!options.includeEmptyCells) {
record = Object.fromEntries(
Object.entries(record).filter(([_key, value]) => value !== ''),
);
}
rows.push(record);
},
});
if (binaryData.id) {
const stream = await this.helpers.getBinaryStream(binaryData.id);
await new Promise<void>(async (resolve, reject) => {
parser.on('error', reject);
parser.on('readable', () => {
stream.unpipe(parser);
stream.destroy();
resolve();
});
stream.pipe(parser);
});
} else {
parser.write(binaryData.data, BINARY_ENCODING);
parser.end();
}
} else {
let workbook: WorkBook;
const xlsxOptions: ParsingOptions = { raw: options.rawData as boolean };
if (options.readAsString) xlsxOptions.type = 'string';

if (binaryData.id) {
const binaryPath = this.helpers.getBinaryPath(binaryData.id);
workbook = xlsxReadFile(binaryPath, xlsxOptions);
} else {
const binaryDataBuffer = Buffer.from(binaryData.data, BINARY_ENCODING);
workbook = xlsxRead(
options.readAsString ? binaryDataBuffer.toString() : binaryDataBuffer,
xlsxOptions,
);
}

if (workbook.SheetNames.length === 0) {
throw new NodeOperationError(
this.getNode(),
'Spreadsheet does not have any sheets!',
{
itemIndex: i,
},
);
}

let sheetName = workbook.SheetNames[0];
if (options.sheetName) {
if (!workbook.SheetNames.includes(options.sheetName as string)) {
throw new NodeOperationError(
this.getNode(),
`Spreadsheet does not contain sheet called "${options.sheetName}"!`,
{ itemIndex: i },
);
}
sheetName = options.sheetName as string;
}

// Convert it to json
const sheetToJsonOptions: Sheet2JSONOpts = {};
if (options.range) {
if (isNaN(options.range as number)) {
sheetToJsonOptions.range = options.range;
} else {
sheetToJsonOptions.range = parseInt(options.range as string, 10);
}
}

if (options.includeEmptyCells) {
sheetToJsonOptions.defval = '';
}

if (options.headerRow === false) {
sheetToJsonOptions.header = 1; // Consider the first row as a data row
}

rows = xlsxUtils.sheet_to_json(workbook.Sheets[sheetName], sheetToJsonOptions);

// Check if data could be found in file
if (rows.length === 0) {
continue;
}
}

// Add all the found data columns to the workflow data
if (options.headerRow === false) {
// Data was returned as an array - https://github.com/SheetJS/sheetjs#json
for (const rowData of rows) {
newItems.push({
json: {
row: rowData,
},
pairedItem: {
item: i,
},
} as INodeExecutionData);
}
} else {
for (const rowData of rows) {
newItems.push({
json: rowData,
pairedItem: {
item: i,
},
} as INodeExecutionData);
}
}
} catch (error) {
if (this.continueOnFail()) {
newItems.push({
json: {
error: error.message,
},
pairedItem: {
item: i,
},
});
continue;
}
throw new NodeOperationError(this.getNode(), error, { itemIndex: i });
}
}

return [newItems];
} else if (operation === 'toFile') {
const pairedItem = generatePairedItemData(items.length);
try {
// Write the workflow data to spreadsheet file
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0);
const fileFormat = this.getNodeParameter('fileFormat', 0) as string;
const options = this.getNodeParameter('options', 0, {});
const sheetToJsonOptions: JSON2SheetOpts = {};
if (options.headerRow === false) {
sheetToJsonOptions.skipHeader = true;
}
// Get the json data of the items and flatten it
let item: INodeExecutionData;
const itemData: IDataObject[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
item = items[itemIndex];
itemData.push(flattenObject(item.json));
}

const ws = xlsxUtils.json_to_sheet(itemData, sheetToJsonOptions);

const wopts: WritingOptions = {
bookSST: false,
type: 'buffer',
};

if (fileFormat === 'csv') {
wopts.bookType = 'csv';
} else if (fileFormat === 'html') {
wopts.bookType = 'html';
} else if (fileFormat === 'rtf') {
wopts.bookType = 'rtf';
} else if (fileFormat === 'ods') {
wopts.bookType = 'ods';
if (options.compression) {
wopts.compression = true;
}
} else if (fileFormat === 'xls') {
wopts.bookType = 'xls';
} else if (fileFormat === 'xlsx') {
wopts.bookType = 'xlsx';
if (options.compression) {
wopts.compression = true;
}
}

// Convert the data in the correct format
const sheetName = (options.sheetName as string) || 'Sheet';
const wb: WorkBook = {
SheetNames: [sheetName],
Sheets: {
[sheetName]: ws,
},
};
const wbout: Buffer = xlsxWrite(wb, wopts);

// Create a new item with only the binary spreadsheet data
const newItem: INodeExecutionData = {
json: {},
binary: {},
pairedItem,
};

let fileName = `spreadsheet.${fileFormat}`;
if (options.fileName !== undefined) {
fileName = options.fileName as string;
}

newItem.binary![binaryPropertyName] = await this.helpers.prepareBinaryData(wbout, fileName);

newItems.push(newItem);
} catch (error) {
if (this.continueOnFail()) {
newItems.push({
json: {
error: error.message,
},
pairedItem,
});
} else {
throw error;
}
}
} else {
if (this.continueOnFail()) {
return [[{ json: { error: `The operation "${operation}" is not supported!` } }]];
} else {
throw new NodeOperationError(
this.getNode(),
`The operation "${operation}" is not supported!`,
);
}
returnData = await fromFile.execute.call(this, items);
}
return [newItems];

if (operation === 'toFile') {
returnData = await toFile.execute.call(this, items);
}

return [returnData];
}
}
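Because the old and new versions are interleaved above, the resulting V2 wiring is easier to see in isolation: each operation module now exports both its UI description and its execute function, and the node itself only dispatches. A condensed sketch of that flow (the function name run is only for illustration; the real code lives in SpreadsheetFileV2.execute):

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

import * as fromFile from './fromFile.operation';
import * as toFile from './toFile.operation';

// Dispatch-only execute: all operation-specific logic lives in the operation modules.
export async function run(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
  const items = this.getInputData();
  const operation = this.getNodeParameter('operation', 0);

  let returnData: INodeExecutionData[] = [];
  if (operation === 'fromFile') returnData = await fromFile.execute.call(this, items);
  if (operation === 'toFile') returnData = await toFile.execute.call(this, items);

  return [returnData];
}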
@ -0,0 +1,230 @@
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow';

import type { Sheet2JSONOpts, WorkBook, ParsingOptions } from 'xlsx';
import { read as xlsxRead, readFile as xlsxReadFile, utils as xlsxUtils } from 'xlsx';

import { parse as createCSVParser } from 'csv-parse';
import { binaryProperty, fromFileOptions } from '../description';

export const description: INodeProperties[] = [
binaryProperty,
{
displayName: 'File Format',
name: 'fileFormat',
type: 'options',
options: [
{
name: 'Autodetect',
value: 'autodetect',
},
{
name: 'CSV',
value: 'csv',
description: 'Comma-separated values',
},
{
name: 'HTML',
value: 'html',
description: 'HTML Table',
},
{
name: 'ODS',
value: 'ods',
description: 'OpenDocument Spreadsheet',
},
{
name: 'RTF',
value: 'rtf',
description: 'Rich Text Format',
},
{
name: 'XLS',
value: 'xls',
description: 'Excel',
},
{
name: 'XLSX',
value: 'xlsx',
description: 'Excel',
},
],
default: 'autodetect',
description: 'The format of the binary data to read from',
displayOptions: {
show: {
operation: ['fromFile'],
},
},
},
fromFileOptions,
];

export async function execute(
this: IExecuteFunctions,
items: INodeExecutionData[],
fileFormatProperty = 'fileFormat',
) {
const returnData: INodeExecutionData[] = [];
let fileExtension;
let fileFormat;

for (let i = 0; i < items.length; i++) {
try {
const options = this.getNodeParameter('options', i, {});
fileFormat = this.getNodeParameter(fileFormatProperty, i, '');
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
fileExtension = binaryData.fileExtension;

let rows: unknown[] = [];

if (
fileFormat === 'autodetect' &&
(binaryData.mimeType === 'text/csv' ||
(binaryData.mimeType === 'text/plain' && binaryData.fileExtension === 'csv'))
) {
fileFormat = 'csv';
}

if (fileFormat === 'csv') {
const maxRowCount = options.maxRowCount as number;
const parser = createCSVParser({
delimiter: options.delimiter as string,
fromLine: options.fromLine as number,
bom: options.enableBOM as boolean,
to: maxRowCount > -1 ? maxRowCount : undefined,
columns: options.headerRow !== false,
onRecord: (record) => {
if (!options.includeEmptyCells) {
record = Object.fromEntries(
Object.entries(record).filter(([_key, value]) => value !== ''),
);
}
rows.push(record);
},
});
if (binaryData.id) {
const stream = await this.helpers.getBinaryStream(binaryData.id);
await new Promise<void>(async (resolve, reject) => {
parser.on('error', reject);
parser.on('readable', () => {
stream.unpipe(parser);
stream.destroy();
resolve();
});
stream.pipe(parser);
});
} else {
parser.write(binaryData.data, BINARY_ENCODING);
parser.end();
}
} else {
let workbook: WorkBook;
const xlsxOptions: ParsingOptions = { raw: options.rawData as boolean };
if (options.readAsString) xlsxOptions.type = 'string';

if (binaryData.id) {
const binaryPath = this.helpers.getBinaryPath(binaryData.id);
workbook = xlsxReadFile(binaryPath, xlsxOptions);
} else {
const binaryDataBuffer = Buffer.from(binaryData.data, BINARY_ENCODING);
workbook = xlsxRead(
options.readAsString ? binaryDataBuffer.toString() : binaryDataBuffer,
xlsxOptions,
);
}

if (workbook.SheetNames.length === 0) {
throw new NodeOperationError(this.getNode(), 'Spreadsheet does not have any sheets!', {
itemIndex: i,
});
}

let sheetName = workbook.SheetNames[0];
if (options.sheetName) {
if (!workbook.SheetNames.includes(options.sheetName as string)) {
throw new NodeOperationError(
this.getNode(),
`Spreadsheet does not contain sheet called "${options.sheetName}"!`,
{ itemIndex: i },
);
}
sheetName = options.sheetName as string;
}

// Convert it to json
const sheetToJsonOptions: Sheet2JSONOpts = {};
if (options.range) {
if (isNaN(options.range as number)) {
sheetToJsonOptions.range = options.range;
} else {
sheetToJsonOptions.range = parseInt(options.range as string, 10);
}
}

if (options.includeEmptyCells) {
sheetToJsonOptions.defval = '';
}

if (options.headerRow === false) {
sheetToJsonOptions.header = 1; // Consider the first row as a data row
}

rows = xlsxUtils.sheet_to_json(workbook.Sheets[sheetName], sheetToJsonOptions);

// Check if data could be found in file
if (rows.length === 0) {
continue;
}
}

// Add all the found data columns to the workflow data
if (options.headerRow === false) {
// Data was returned as an array - https://github.com/SheetJS/sheetjs#json
for (const rowData of rows) {
returnData.push({
json: {
row: rowData,
},
pairedItem: {
item: i,
},
} as INodeExecutionData);
}
} else {
for (const rowData of rows) {
returnData.push({
json: rowData,
pairedItem: {
item: i,
},
} as INodeExecutionData);
}
}
} catch (error) {
let errorDescription = error.description;
if (fileExtension && fileExtension !== fileFormat) {
error.message = `The file selected in 'Input Binary Field' is not in ${fileFormat} format`;
errorDescription = `Try to change the operation or select a ${fileFormat} file in 'Input Binary Field'`;
}
if (this.continueOnFail()) {
returnData.push({
json: {
error: error.message,
},
pairedItem: {
item: i,
},
});
continue;
}
throw new NodeOperationError(this.getNode(), error, {
itemIndex: i,
description: errorDescription,
});
}
}

return returnData;
}
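Note the optional fileFormatProperty parameter on execute: it defaults to 'fileFormat', so the Spreadsheet File V2 node can call the operation unchanged, while another caller could presumably point the same executor at a differently named parameter. A hypothetical example (the parameter name and function name are invented for illustration):

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

import * as fromFile from './fromFile.operation';

// Hypothetical caller: reuse the shared reader but resolve the file format from a
// parameter named 'inputFileFormat' instead of the default 'fileFormat'.
async function readWithCustomFormatParameter(this: IExecuteFunctions, items: INodeExecutionData[]) {
  return await fromFile.execute.call(this, items, 'inputFileFormat');
}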
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue