merge latest

Mutasem 2021-07-07 15:57:43 +02:00
commit fdbc0bb650
149 changed files with 9438 additions and 1310 deletions

.github/workflows/test-workflows.yml (new file, 85 lines)

@ -0,0 +1,85 @@
name: Run test workflows
on:
schedule:
- cron: "0 2 * * *"
workflow_dispatch:
jobs:
run-test-workflows:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
-
name: Checkout
uses: actions/checkout@v2
with:
path: n8n
-
name: Checkout workflows repo
uses: actions/checkout@v2
with:
repository: n8n-io/test-workflows
path: test-workflows
-
name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
-
name: npm install and build
run: |
cd n8n
npm install
npm run bootstrap
npm run build --if-present
env:
CI: true
shell: bash
-
name: Import credentials
run: n8n/packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json
shell: bash
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
-
name: Import workflows
run: n8n/packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows
shell: bash
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
-
name: Copy static assets
run: |
cp n8n/assets/n8n-logo.png /tmp/n8n-logo.png
cp n8n/assets/n8n-screenshot.png /tmp/n8n-screenshot.png
cp n8n/node_modules/pdf-parse/test/data/05-versions-space.pdf /tmp/05-versions-space.pdf
cp n8n/node_modules/pdf-parse/test/data/04-valid.pdf /tmp/04-valid.pdf
shell: bash
-
name: Run tests
run: n8n/packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --shortOutput --concurrency=16 --compare=test-workflows/snapshots
shell: bash
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
-
name: Export credentials
if: always()
run: n8n/packages/cli/bin/n8n export:credentials --output=test-workflows/credentials.json --all --pretty
shell: bash
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
-
name: Commit and push credential changes
if: always()
run: |
cd test-workflows
git config --global user.name 'n8n test bot'
git config --global user.email 'n8n-test-bot@users.noreply.github.com'
git commit -am "Automated credential update"
git push --force --quiet "https://janober:${{ secrets.TOKEN }}@github.com/n8n-io/test-workflows.git" main:main


@ -2,6 +2,16 @@
This list shows all the versions which include breaking changes and how to upgrade.
## 0.127.0
### What changed?
For the Zoho node, the `lead:create` operation now requires a "Company" parameter, the parameter "Address" is now inside "Additional Options", and the parameters "Title" and "Is Duplicate Record" were removed. Also, the `lead:delete` operation now returns only the `id` of the deleted lead.
### When is action necessary?
If you are using `lead:create` with "Company" or "Address", reset those parameters; the removed parameters require no action. If you are using the response from `lead:delete`, reselect the `id` key.
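As a rough sketch of the new behavior (the ID value below is made up), a `lead:delete` call now returns only the deleted lead's `id`, so expressions that read other fields from its output should reference `id` instead:

// Hypothetical shape of a single `lead:delete` response item as of 0.127.0:
const deletedLead = { id: '3652397000000190001' };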
## 0.118.0
### What changed?

packages/cli/commands/Interfaces.d.ts (new file, 54 lines)

@ -0,0 +1,54 @@
interface IResult {
totalWorkflows: number;
summary: {
failedExecutions: number,
successfulExecutions: number,
warningExecutions: number,
errors: IExecutionError[],
warnings: IExecutionError[],
};
coveredNodes: {
[nodeType: string]: number
};
executions: IExecutionResult[];
}
interface IExecutionResult {
workflowId: string | number;
workflowName: string;
executionTime: number; // Given in seconds with decimals for milliseconds
finished: boolean;
executionStatus: ExecutionStatus;
error?: string;
changes?: string;
coveredNodes: {
[nodeType: string]: number
};
}
interface IExecutionError {
workflowId: string | number;
error: string;
}
interface IWorkflowExecutionProgress {
workflowId: string | number;
status: ExecutionStatus;
}
interface INodeSpecialCases {
[nodeName: string]: INodeSpecialCase;
}
interface INodeSpecialCase {
ignoredProperties?: string[];
capResults?: number;
}
type ExecutionStatus = 'success' | 'error' | 'warning' | 'running';
declare module 'json-diff' {
interface IDiffOptions {
keysOnly?: boolean;
}
export function diff(obj1: unknown, obj2: unknown, diffOptions: IDiffOptions): string;
}


@ -22,7 +22,7 @@ import {
WorkflowRunner,
} from '../src';
import {
getLogger,
} from '../src/Logger';
@ -46,6 +46,9 @@ export class Execute extends Command {
id: flags.string({
description: 'id of the workflow to execute',
}),
rawOutput: flags.boolean({
description: 'Outputs only JSON data, with no other text',
}),
};
@ -183,10 +186,11 @@ export class Execute extends Command {
stack: error.stack,
};
}
console.info('Execution was successful:');
console.info('====================================');
console.info(JSON.stringify(data, null, 2));
if (flags.rawOutput === undefined) {
this.log('Execution was successful:');
this.log('====================================');
}
this.log(JSON.stringify(data, null, 2));
} catch (e) {
console.error('Error executing workflow. See log messages for details.');
logger.error('\nExecution error:');


@ -0,0 +1,796 @@
import * as fs from 'fs';
import {
Command,
flags,
} from '@oclif/command';
import {
UserSettings,
} from 'n8n-core';
import {
INode,
INodeExecutionData,
ITaskData,
} from 'n8n-workflow';
import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
IExecutionsCurrentSummary,
IWorkflowDb,
IWorkflowExecutionDataProcess,
LoadNodesAndCredentials,
NodeTypes,
WorkflowCredentials,
WorkflowRunner,
} from '../src';
import {
sep,
} from 'path';
import {
diff,
} from 'json-diff';
import {
getLogger,
} from '../src/Logger';
import {
LoggerProxy,
} from 'n8n-workflow';
export class ExecuteBatch extends Command {
static description = '\nExecutes multiple workflows once';
static cancelled = false;
static workflowExecutionsProgress: IWorkflowExecutionProgress[][];
static shallow = false;
static compare: string;
static snapshot: string;
static concurrency = 1;
static debug = false;
static executionTimeout = 3 * 60 * 1000;
static examples = [
`$ n8n executeAll`,
`$ n8n executeAll --concurrency=10 --skipList=/data/skipList.txt`,
`$ n8n executeAll --debug --output=/data/output.json`,
`$ n8n executeAll --ids=10,13,15 --shortOutput`,
`$ n8n executeAll --snapshot=/data/snapshots --shallow`,
`$ n8n executeAll --compare=/data/previousExecutionData --retries=2`,
];
static flags = {
help: flags.help({ char: 'h' }),
debug: flags.boolean({
description: 'Toggles on displaying all errors and debug messages.',
}),
ids: flags.string({
description: 'Specifies workflow IDs to get executed, separated by a comma.',
}),
concurrency: flags.integer({
default: 1,
description: 'How many workflows can run in parallel. Defaults to 1 which means no concurrency.',
}),
output: flags.string({
description: 'Enables execution saving. You must provide an existing folder to save executions via this parameter.',
}),
snapshot: flags.string({
description: 'Enables snapshot saving. You must provide an existing folder to save snapshots via this parameter.',
}),
compare: flags.string({
description: 'Compares the current execution with an existing snapshot. You must provide an existing folder where the snapshots are saved.',
}),
shallow: flags.boolean({
description: 'Compares only whether the attributes output from a node are the same, with no regard to nested JSON objects.',
}),
skipList: flags.string({
description: 'File containing a comma separated list of workflow IDs to skip.',
}),
retries: flags.integer({
description: 'Retries failed workflows up to N tries. Default is 1. Set 0 to disable.',
default: 1,
}),
shortOutput: flags.boolean({
description: 'Omits the full execution information from output, displaying only summary.',
}),
};
/**
* Gracefully handles exit.
* @param {boolean | number} skipExit Whether to skip exiting, or the signal number received when called as a signal handler.
*/
static async stopProcess(skipExit: boolean | number = false) {
if (ExecuteBatch.cancelled === true) {
process.exit(0);
}
ExecuteBatch.cancelled = true;
const activeExecutionsInstance = ActiveExecutions.getInstance();
const stopPromises = activeExecutionsInstance.getActiveExecutions().map(async execution => {
activeExecutionsInstance.stopExecution(execution.id);
});
await Promise.allSettled(stopPromises);
setTimeout(() => {
process.exit(0);
}, 30000);
let executingWorkflows = activeExecutionsInstance.getActiveExecutions() as IExecutionsCurrentSummary[];
let count = 0;
while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) {
console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
executingWorkflows.map(execution => {
console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`);
});
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
executingWorkflows = activeExecutionsInstance.getActiveExecutions();
}
// We may receive `true`, but when called from `process.on`
// we get the signal instead (SIGINT, etc.).
if (skipExit !== true) {
process.exit(0);
}
}
formatJsonOutput(data: object) {
return JSON.stringify(data, null, 2);
}
shouldBeConsideredAsWarning(errorMessage: string) {
const warningStrings = [
'refresh token is invalid',
'unable to connect to',
'econnreset',
'429',
'econnrefused',
'missing a required parameter',
];
errorMessage = errorMessage.toLowerCase();
for (let i = 0; i < warningStrings.length; i++) {
if (errorMessage.includes(warningStrings[i])) {
return true;
}
}
return false;
}
async run() {
process.on('SIGTERM', ExecuteBatch.stopProcess);
process.on('SIGINT', ExecuteBatch.stopProcess);
const logger = getLogger();
LoggerProxy.init(logger);
const { flags } = this.parse(ExecuteBatch);
ExecuteBatch.debug = flags.debug === true;
ExecuteBatch.concurrency = flags.concurrency || 1;
const ids: number[] = [];
const skipIds: number[] = [];
if (flags.snapshot !== undefined) {
if (fs.existsSync(flags.snapshot)) {
if (!fs.lstatSync(flags.snapshot).isDirectory()) {
console.log(`The parameter --snapshot must be an existing directory`);
return;
}
} else {
console.log(`The parameter --snapshot must be an existing directory`);
return;
}
ExecuteBatch.snapshot = flags.snapshot;
}
if (flags.compare !== undefined) {
if (fs.existsSync(flags.compare)) {
if (!fs.lstatSync(flags.compare).isDirectory()) {
console.log(`The parameter --compare must be an existing directory`);
return;
}
} else {
console.log(`The parameter --compare must be an existing directory`);
return;
}
ExecuteBatch.compare = flags.compare;
}
if (flags.output !== undefined) {
if (fs.existsSync(flags.output)) {
if (fs.lstatSync(flags.output).isDirectory()) {
console.log(`The parameter --output must be a writable file`);
return;
}
}
}
if (flags.ids !== undefined) {
const paramIds = flags.ids.split(',');
const re = /\d+/;
const matchedIds = paramIds.filter(id => id.match(re)).map(id => parseInt(id.trim(), 10));
if (matchedIds.length === 0) {
console.log(`The parameter --ids must be a list of numeric IDs separated by a comma.`);
return;
}
ids.push(...matchedIds);
}
if (flags.skipList !== undefined) {
if (fs.existsSync(flags.skipList)) {
const contents = fs.readFileSync(flags.skipList, { encoding: 'utf-8' });
skipIds.push(...contents.split(',').map(id => parseInt(id.trim(), 10)));
} else {
console.log('Skip list file not found. Exiting.');
return;
}
}
if (flags.shallow === true) {
ExecuteBatch.shallow = true;
}
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init();
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
const loadNodesAndCredentialsPromise = loadNodesAndCredentials.init();
// Make sure the settings exist
await UserSettings.prepareUserSettings();
// Wait till the database is ready
await startDbInitPromise;
let allWorkflows;
const query = Db.collections!.Workflow!.createQueryBuilder('workflows');
if (ids.length > 0) {
query.andWhere(`workflows.id in (:...ids)`, { ids });
}
if (skipIds.length > 0) {
query.andWhere(`workflows.id not in (:...skipIds)`, { skipIds });
}
allWorkflows = await query.getMany() as IWorkflowDb[];
if (ExecuteBatch.debug === true) {
process.stdout.write(`Found ${allWorkflows.length} workflows to execute.\n`);
}
// Wait till the n8n-packages have been read
await loadNodesAndCredentialsPromise;
// Load the credentials overwrites if any exist
await CredentialsOverwrites().init();
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
// Send a shallow copy of allWorkflows so we still have all workflow data.
const results = await this.runTests([...allWorkflows]);
let { retries } = flags;
while (retries > 0 && (results.summary.warningExecutions + results.summary.failedExecutions > 0) && ExecuteBatch.cancelled === false) {
const failedWorkflowIds = results.summary.errors.map(execution => execution.workflowId);
failedWorkflowIds.push(...results.summary.warnings.map(execution => execution.workflowId));
const newWorkflowList = allWorkflows.filter(workflow => failedWorkflowIds.includes(workflow.id));
const retryResults = await this.runTests(newWorkflowList);
this.mergeResults(results, retryResults);
// By now, `results` has been updated with the new successful executions.
retries--;
}
if (flags.output !== undefined) {
fs.writeFileSync(flags.output, this.formatJsonOutput(results));
console.log('\nExecution finished.');
console.log('Summary:');
console.log(`\tSuccess: ${results.summary.successfulExecutions}`);
console.log(`\tFailures: ${results.summary.failedExecutions}`);
console.log(`\tWarnings: ${results.summary.warningExecutions}`);
console.log('\nNodes successfully tested:');
Object.entries(results.coveredNodes).forEach(([nodeName, nodeCount]) => {
console.log(`\t${nodeName}: ${nodeCount}`);
});
console.log('\nCheck the JSON file for more details.');
} else {
if (flags.shortOutput === true) {
console.log(this.formatJsonOutput({ ...results, executions: results.executions.filter(execution => execution.executionStatus !== 'success') }));
} else {
console.log(this.formatJsonOutput(results));
}
}
await ExecuteBatch.stopProcess(true);
if (results.summary.failedExecutions > 0) {
this.exit(1);
}
this.exit(0);
}
mergeResults(results: IResult, retryResults: IResult) {
if (retryResults.summary.successfulExecutions === 0) {
// Nothing to replace.
return;
}
// Find successful executions and replace them on previous result.
retryResults.executions.forEach(newExecution => {
if (newExecution.executionStatus === 'success') {
// Remove previous execution from list.
results.executions = results.executions.filter(previousExecutions => previousExecutions.workflowId !== newExecution.workflowId);
const errorIndex = results.summary.errors.findIndex(summaryInformation => summaryInformation.workflowId === newExecution.workflowId);
if (errorIndex !== -1) {
// This workflow errored previously. Decrement error count.
results.summary.failedExecutions--;
// Remove from the list of errors.
results.summary.errors.splice(errorIndex, 1);
}
const warningIndex = results.summary.warnings.findIndex(summaryInformation => summaryInformation.workflowId === newExecution.workflowId);
if (warningIndex !== -1) {
// This workflow warned previously. Decrement warning count.
results.summary.warningExecutions--;
// Remove from the list of warnings.
results.summary.warnings.splice(warningIndex, 1);
}
// Increment successful executions count and push it to all executions array.
results.summary.successfulExecutions++;
results.executions.push(newExecution);
}
});
}
async runTests(allWorkflows: IWorkflowDb[]): Promise<IResult> {
const result: IResult = {
totalWorkflows: allWorkflows.length,
summary: {
failedExecutions: 0,
warningExecutions: 0,
successfulExecutions: 0,
errors: [],
warnings: [],
},
coveredNodes: {},
executions: [],
};
if (ExecuteBatch.debug) {
this.initializeLogs();
}
return new Promise(async (res) => {
const promisesArray = [];
for (let i = 0; i < ExecuteBatch.concurrency; i++) {
const promise = new Promise(async (resolve) => {
let workflow: IWorkflowDb | undefined;
while (allWorkflows.length > 0) {
workflow = allWorkflows.shift();
if (ExecuteBatch.cancelled === true) {
process.stdout.write(`Thread ${i + 1} resolving and quitting.`);
resolve(true);
break;
}
// This check should not really be needed,
// but it is a concurrency precaution.
if (workflow === undefined) {
resolve(true);
return;
}
if (ExecuteBatch.debug) {
ExecuteBatch.workflowExecutionsProgress[i].push({
workflowId: workflow.id,
status: 'running',
});
this.updateStatus();
}
await this.startThread(workflow).then((executionResult) => {
if (ExecuteBatch.debug) {
ExecuteBatch.workflowExecutionsProgress[i].pop();
}
result.executions.push(executionResult);
if (executionResult.executionStatus === 'success') {
if (ExecuteBatch.debug) {
ExecuteBatch.workflowExecutionsProgress[i].push({
workflowId: workflow!.id,
status: 'success',
});
this.updateStatus();
}
result.summary.successfulExecutions++;
const nodeNames = Object.keys(executionResult.coveredNodes);
nodeNames.map(nodeName => {
if (result.coveredNodes[nodeName] === undefined) {
result.coveredNodes[nodeName] = 0;
}
result.coveredNodes[nodeName] += executionResult.coveredNodes[nodeName];
});
} else if (executionResult.executionStatus === 'warning') {
result.summary.warningExecutions++;
result.summary.warnings.push({
workflowId: executionResult.workflowId,
error: executionResult.error!,
});
if (ExecuteBatch.debug) {
ExecuteBatch.workflowExecutionsProgress[i].push({
workflowId: workflow!.id,
status: 'warning',
});
this.updateStatus();
}
} else if (executionResult.executionStatus === 'error') {
result.summary.failedExecutions++;
result.summary.errors.push({
workflowId: executionResult.workflowId,
error: executionResult.error!,
});
if (ExecuteBatch.debug) {
ExecuteBatch.workflowExecutionsProgress[i].push({
workflowId: workflow!.id,
status: 'error',
});
this.updateStatus();
}
} else {
throw new Error('Wrong execution status - cannot proceed');
}
});
}
resolve(true);
});
promisesArray.push(promise);
}
await Promise.allSettled(promisesArray);
res(result);
});
}
updateStatus() {
if (ExecuteBatch.cancelled === true) {
return;
}
if (process.stdout.isTTY === true) {
process.stdout.moveCursor(0, - (ExecuteBatch.concurrency));
process.stdout.cursorTo(0);
process.stdout.clearLine(0);
}
ExecuteBatch.workflowExecutionsProgress.map((concurrentThread, index) => {
let message = `${index + 1}: `;
concurrentThread.map((executionItem, workflowIndex) => {
let openColor = '\x1b[0m';
const closeColor = '\x1b[0m';
switch (executionItem.status) {
case 'success':
openColor = '\x1b[32m';
break;
case 'error':
openColor = '\x1b[31m';
break;
case 'warning':
openColor = '\x1b[33m';
break;
default:
break;
}
message += (workflowIndex > 0 ? ', ' : '') + `${openColor}${executionItem.workflowId}${closeColor}`;
});
if (process.stdout.isTTY === true) {
process.stdout.cursorTo(0);
process.stdout.clearLine(0);
}
process.stdout.write(message + '\n');
});
}
initializeLogs() {
process.stdout.write('**********************************************\n');
process.stdout.write(' n8n test workflows\n');
process.stdout.write('**********************************************\n');
process.stdout.write('\n');
process.stdout.write('Batch number:\n');
ExecuteBatch.workflowExecutionsProgress = [];
for (let i = 0; i < ExecuteBatch.concurrency; i++) {
ExecuteBatch.workflowExecutionsProgress.push([]);
process.stdout.write(`${i + 1}: \n`);
}
}
startThread(workflowData: IWorkflowDb): Promise<IExecutionResult> {
// This will be the object returned by the promise.
// It will be updated according to execution progress below.
const executionResult: IExecutionResult = {
workflowId: workflowData.id,
workflowName: workflowData.name,
executionTime: 0,
finished: false,
executionStatus: 'running',
coveredNodes: {},
};
const requiredNodeTypes = ['n8n-nodes-base.start'];
let startNode: INode | undefined = undefined;
for (const node of workflowData.nodes) {
if (requiredNodeTypes.includes(node.type)) {
startNode = node;
break;
}
}
// We have a cool feature here.
// On each node, on the Settings tab in the node editor you can change
// the `Notes` field to add special cases for comparison and snapshots.
// You need to set one configuration per line with the following possible keys:
// CAP_RESULTS_LENGTH=x where x is a number. Cap the number of rows from this node to x.
// This means if you set CAP_RESULTS_LENGTH=1 we will have only 1 row in the output
// IGNORED_PROPERTIES=x,y,z where x, y and z are JSON property names. Removes these
// properties from the JSON object (useful for optional properties that can
// cause the comparison to detect changes when not true).
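// As a made-up illustration, a Notes field combining both keys could look like:
//   CAP_RESULTS_LENGTH=1
//   IGNORED_PROPERTIES=updatedAt,etag
// (the property names above are hypothetical).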
const nodeEdgeCases = {} as INodeSpecialCases;
workflowData.nodes.forEach(node => {
executionResult.coveredNodes[node.type] = (executionResult.coveredNodes[node.type] || 0) + 1;
if (node.notes !== undefined && node.notes !== '') {
node.notes.split('\n').forEach(note => {
const parts = note.split('=');
if (parts.length === 2) {
if (nodeEdgeCases[node.name] === undefined) {
nodeEdgeCases[node.name] = {} as INodeSpecialCase;
}
if (parts[0] === 'CAP_RESULTS_LENGTH') {
nodeEdgeCases[node.name].capResults = parseInt(parts[1], 10);
} else if (parts[0] === 'IGNORED_PROPERTIES') {
nodeEdgeCases[node.name].ignoredProperties = parts[1].split(',').map(property => property.trim());
}
}
});
}
});
return new Promise(async (resolve) => {
if (startNode === undefined) {
// If the workflow does not contain a start node we cannot know what
// should be executed and with which data to start.
executionResult.error = 'Workflow cannot be started as it does not contain a "Start" node.';
executionResult.executionStatus = 'warning';
resolve(executionResult);
}
let gotCancel = false;
// Times out the execution after `ExecuteBatch.executionTimeout` (3 minutes).
const timeoutTimer = setTimeout(() => {
gotCancel = true;
executionResult.error = 'Workflow execution timed out.';
executionResult.executionStatus = 'warning';
resolve(executionResult);
}, ExecuteBatch.executionTimeout);
try {
const credentials = await WorkflowCredentials(workflowData!.nodes);
const runData: IWorkflowExecutionDataProcess = {
credentials,
executionMode: 'cli',
startNodes: [startNode!.name],
workflowData: workflowData!,
};
const workflowRunner = new WorkflowRunner();
const executionId = await workflowRunner.run(runData);
const activeExecutions = ActiveExecutions.getInstance();
const data = await activeExecutions.getPostExecutePromise(executionId);
if (gotCancel || ExecuteBatch.cancelled === true) {
clearTimeout(timeoutTimer);
// The promise was settled already so we simply ignore.
return;
}
if (data === undefined) {
executionResult.error = 'Workflow did not return any data.';
executionResult.executionStatus = 'error';
} else {
executionResult.executionTime = (Date.parse(data.stoppedAt as unknown as string) - Date.parse(data.startedAt as unknown as string)) / 1000;
executionResult.finished = (data?.finished !== undefined) as boolean;
if (data.data.resultData.error) {
executionResult.error =
data.data.resultData.error.hasOwnProperty('description') ?
// @ts-ignore
data.data.resultData.error.description : data.data.resultData.error.message;
if (data.data.resultData.lastNodeExecuted !== undefined) {
executionResult.error += ` on node ${data.data.resultData.lastNodeExecuted}`;
}
executionResult.executionStatus = 'error';
if (this.shouldBeConsideredAsWarning(executionResult.error || '')) {
executionResult.executionStatus = 'warning';
}
} else {
if (ExecuteBatch.shallow === true) {
// This guarantees that top-level attributes
// from the JSON are kept and are of the same type.
// We convert nested JSON objects to a simple {object: true}
// and nested arrays to ['json array'].
// This reduces the chance of false positives but may
// result in not detecting deeper changes.
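// As a made-up illustration of this transform, an item like
//   { name: 'a', meta: { x: 1 }, tags: [1, 2] }
// becomes
//   { name: 'a', meta: { object: true }, tags: ['json array'] }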
Object.keys(data.data.resultData.runData).map((nodeName: string) => {
data.data.resultData.runData[nodeName].map((taskData: ITaskData) => {
if (taskData.data === undefined) {
return;
}
Object.keys(taskData.data).map(connectionName => {
const connection = taskData.data![connectionName] as Array<INodeExecutionData[] | null>;
connection.map(executionDataArray => {
if (executionDataArray === null) {
return;
}
if (nodeEdgeCases[nodeName] !== undefined && nodeEdgeCases[nodeName].capResults !== undefined) {
executionDataArray.splice(nodeEdgeCases[nodeName].capResults!);
}
executionDataArray.map(executionData => {
if (executionData.json === undefined) {
return;
}
if (nodeEdgeCases[nodeName] !== undefined && nodeEdgeCases[nodeName].ignoredProperties !== undefined) {
nodeEdgeCases[nodeName].ignoredProperties!.forEach(ignoredProperty => delete executionData.json[ignoredProperty]);
}
const jsonProperties = executionData.json;
const nodeOutputAttributes = Object.keys(jsonProperties);
nodeOutputAttributes.map(attributeName => {
if (Array.isArray(jsonProperties[attributeName])) {
jsonProperties[attributeName] = ['json array'];
} else if (typeof jsonProperties[attributeName] === 'object') {
jsonProperties[attributeName] = { object: true };
}
});
});
});
});
});
});
} else {
// If not using shallow comparison, we only apply the nodeEdgeCases.
const specialCases = Object.keys(nodeEdgeCases);
specialCases.forEach(nodeName => {
data.data.resultData.runData[nodeName].map((taskData: ITaskData) => {
if (taskData.data === undefined) {
return;
}
Object.keys(taskData.data).map(connectionName => {
const connection = taskData.data![connectionName] as Array<INodeExecutionData[] | null>;
connection.map(executionDataArray => {
if (executionDataArray === null) {
return;
}
if (nodeEdgeCases[nodeName].capResults !== undefined) {
executionDataArray.splice(nodeEdgeCases[nodeName].capResults!);
}
if (nodeEdgeCases[nodeName].ignoredProperties !== undefined) {
executionDataArray.map(executionData => {
if (executionData.json === undefined) {
return;
}
nodeEdgeCases[nodeName].ignoredProperties!.forEach(ignoredProperty => delete executionData.json[ignoredProperty]);
});
}
});
});
});
});
}
const serializedData = this.formatJsonOutput(data);
if (ExecuteBatch.compare === undefined) {
executionResult.executionStatus = 'success';
} else {
const fileName = (ExecuteBatch.compare.endsWith(sep) ? ExecuteBatch.compare : ExecuteBatch.compare + sep) + `${workflowData.id}-snapshot.json`;
if (fs.existsSync(fileName) === true) {
const contents = fs.readFileSync(fileName, { encoding: 'utf-8' });
const changes = diff(JSON.parse(contents), data, { keysOnly: true });
if (changes !== undefined) {
// we have structural changes. Report them.
executionResult.error = `Workflow may contain breaking changes`;
executionResult.changes = changes;
executionResult.executionStatus = 'error';
} else {
executionResult.executionStatus = 'success';
}
} else {
executionResult.error = 'Snapshot not found.';
executionResult.executionStatus = 'warning';
}
}
// Save snapshots only after comparing - this is to make sure we're updating
// only after comparing to the existing version.
if (ExecuteBatch.snapshot !== undefined) {
const fileName = (ExecuteBatch.snapshot.endsWith(sep) ? ExecuteBatch.snapshot : ExecuteBatch.snapshot + sep) + `${workflowData.id}-snapshot.json`;
fs.writeFileSync(fileName, serializedData);
}
}
}
} catch (e) {
executionResult.error = 'Workflow failed to execute.';
executionResult.executionStatus = 'error';
}
clearTimeout(timeoutTimer);
resolve(executionResult);
});
}
}


@ -65,6 +65,9 @@ export class ImportCredentialsCommand extends Command {
try {
await Db.init();
// Make sure the settings exist
await UserSettings.prepareUserSettings();
let i;
const encryptionKey = await UserSettings.getEncryptionKey();


@ -18,6 +18,9 @@ import {
import * as fs from 'fs';
import * as glob from 'glob-promise';
import * as path from 'path';
import {
UserSettings,
} from 'n8n-core';
export class ImportWorkflowsCommand extends Command {
static description = 'Import workflows';
@ -60,6 +63,9 @@ export class ImportWorkflowsCommand extends Command {
try {
await Db.init();
// Make sure the settings exist
await UserSettings.prepareUserSettings();
let i;
if (flags.separate) {
const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json');


@ -0,0 +1,67 @@
import {
Command,
flags,
} from '@oclif/command';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
} from "../../src";
export class ListWorkflowCommand extends Command {
static description = '\nList workflows';
static examples = [
'$ n8n list:workflow',
'$ n8n list:workflow --active=true --onlyId',
'$ n8n list:workflow --active=false',
];
static flags = {
help: flags.help({ char: 'h' }),
active: flags.string({
description: 'Filters workflows by active status. Can be true or false',
}),
onlyId: flags.boolean({
description: 'Outputs workflow IDs only, one per line.',
}),
};
async run() {
const { flags } = this.parse(ListWorkflowCommand);
if (flags.active !== undefined && !['true', 'false'].includes(flags.active)) {
this.error('The --active flag has to be passed using true or false');
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.active !== undefined) {
findQuery.active = flags.active === 'true';
}
const workflows = await Db.collections.Workflow!.find(findQuery);
if (flags.onlyId) {
workflows.forEach(workflow => console.log(workflow.id));
} else {
workflows.forEach(workflow => console.log(workflow.id + "|" + workflow.name));
}
} catch (e) {
console.error('\nGOT ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
this.exit();
}
}


@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "0.126.1",
"version": "0.127.0",
"description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@ -82,6 +82,7 @@
"dependencies": {
"@oclif/command": "^1.5.18",
"@oclif/errors": "^1.2.2",
"@types/json-diff": "^0.5.1",
"@types/jsonwebtoken": "^8.5.2",
"basic-auth": "^2.0.1",
"bcryptjs": "^2.4.3",
@ -101,15 +102,16 @@
"glob-promise": "^3.4.0",
"google-timezones-json": "^1.0.2",
"inquirer": "^7.0.1",
"json-diff": "^0.5.4",
"jsonwebtoken": "^8.5.1",
"jwks-rsa": "~1.12.1",
"localtunnel": "^2.0.0",
"lodash.get": "^4.4.2",
"mysql2": "~2.2.0",
"n8n-core": "~0.75.0",
"n8n-editor-ui": "~0.96.1",
"n8n-nodes-base": "~0.123.1",
"n8n-workflow": "~0.62.0",
"n8n-core": "~0.76.0",
"n8n-editor-ui": "~0.97.0",
"n8n-nodes-base": "~0.124.0",
"n8n-workflow": "~0.63.0",
"oauth-1.0a": "^2.2.6",
"open": "^7.0.0",
"pg": "^8.3.0",


@ -188,6 +188,7 @@ export interface IExecutionsListResponse {
count: number;
// results: IExecutionShortResponse[];
results: IExecutionsSummary[];
estimated: boolean;
}
export interface IExecutionsStopData {


@ -33,6 +33,7 @@ import {
CredentialsHelper,
CredentialsOverwrites,
CredentialTypes,
DatabaseType,
Db,
ExternalHooks,
GenericHelpers,
@ -88,6 +89,7 @@ import {
IRunData,
IWorkflowBase,
IWorkflowCredentials,
LoggerProxy,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
@ -1612,8 +1614,7 @@ class App {
executingWorkflowIds.push(...this.activeExecutionsInstance.getActiveExecutions().map(execution => execution.id.toString()) as string[]);
const countFilter = JSON.parse(JSON.stringify(filter));
countFilter.select = ['id'];
countFilter.where = {id: Not(In(executingWorkflowIds))};
countFilter.id = Not(In(executingWorkflowIds));
const resultsQuery = await Db.collections.Execution!
.createQueryBuilder("execution")
@ -1645,10 +1646,10 @@ class App {
const resultsPromise = resultsQuery.getMany();
const countPromise = Db.collections.Execution!.count(countFilter);
const countPromise = getExecutionsCount(countFilter);
const results: IExecutionFlattedDb[] = await resultsPromise;
const count = await countPromise;
const countedObjects = await countPromise;
const returnResults: IExecutionsSummary[] = [];
@ -1667,8 +1668,9 @@ class App {
}
return {
count,
count: countedObjects.count,
results: returnResults,
estimated: countedObjects.estimate,
};
}));
@ -2161,3 +2163,35 @@ export async function start(): Promise<void> {
await app.externalHooks.run('n8n.ready', [app]);
});
}
async function getExecutionsCount(countFilter: IDataObject): Promise<{ count: number; estimate: boolean; }> {
const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType;
const filteredFields = Object.keys(countFilter).filter(field => field !== 'id');
// Do a regular count for databases other than Postgres, or
// if we are filtering based on workflowId or finished fields.
if (dbType !== 'postgresdb' || filteredFields.length > 0) {
const count = await Db.collections.Execution!.count(countFilter);
return { count, estimate: false };
}
try {
// Get an estimate of rows count.
const estimateRowsNumberSql = "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';";
const rows: Array<{ n_live_tup: string }> = await Db.collections.Execution!.query(estimateRowsNumberSql);
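// The query yields one row per matching table, e.g. (value hypothetical):
//   [{ n_live_tup: '250000' }]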
const estimate = parseInt(rows[0].n_live_tup, 10);
// If over 100k, return just an estimate.
if (estimate > 100000) {
// Below 100k we fall through to the real count, as even a full
// table scan should not take long.
return { count: estimate, estimate: true };
}
} catch (err) {
LoggerProxy.warn('Unable to get executions count from postgres: ' + err);
}
const count = await Db.collections.Execution!.count(countFilter);
return { count, estimate: false };
}


@ -1,7 +1,8 @@
{
"compilerOptions": {
"lib": [
"es2017"
"es2017",
"ES2020.Promise"
],
"types": [
"node",


@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "0.75.0",
"version": "0.76.0",
"description": "Core functionality of n8n",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@ -47,7 +47,7 @@
"file-type": "^14.6.2",
"lodash.get": "^4.4.2",
"mime-types": "^2.1.27",
"n8n-workflow": "~0.62.0",
"n8n-workflow": "~0.63.0",
"oauth-1.0a": "^2.2.6",
"p-cancelable": "^2.0.0",
"request": "^2.88.2",


@ -1,6 +1,6 @@
{
"name": "n8n-editor-ui",
"version": "0.96.1",
"version": "0.97.0",
"description": "Workflow Editor UI for n8n",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@ -69,7 +69,7 @@
"lodash.debounce": "^4.0.8",
"lodash.get": "^4.4.2",
"lodash.set": "^4.3.2",
"n8n-workflow": "~0.62.0",
"n8n-workflow": "~0.63.0",
"node-sass": "^4.12.0",
"normalize-wheel": "^1.0.1",
"prismjs": "^1.17.1",


@ -325,6 +325,7 @@ export interface IExecutionShortResponse {
export interface IExecutionsListResponse {
count: number;
results: IExecutionsSummary[];
estimated: boolean;
}
export interface IExecutionsCurrentSummaryExtended {


@ -14,6 +14,10 @@
<div v-if="!binaryData">
No data to display was found
</div>
<video v-else-if="binaryData.mimeType && binaryData.mimeType.startsWith('video/')" controls autoplay>
<source :src="'data:' + binaryData.mimeType + ';base64,' + binaryData.data" :type="binaryData.mimeType">
Your browser does not support the video element. Please update it to the latest version.
</video>
<embed v-else :src="'data:' + binaryData.mimeType + ';base64,' + binaryData.data" class="binary-data" :class="embedClass"/>
</div>


@ -1,6 +1,6 @@
<template>
<span>
<el-dialog :visible="dialogVisible" append-to-body width="80%" :title="`Workflow Executions (${combinedExecutions.length}/${combinedExecutionsCount})`" :before-close="closeDialog">
<el-dialog :visible="dialogVisible" append-to-body width="80%" :title="`Workflow Executions ${combinedExecutions.length}/${finishedExecutionsCountEstimated === true ? '~' : ''}${combinedExecutionsCount}`" :before-close="closeDialog">
<div class="filters">
<el-row>
<el-col :span="4" class="filter-headline">
@ -38,7 +38,7 @@
<div class="selection-options">
<span v-if="checkAll === true || isIndeterminate === true">
Selected: {{numSelected}}/{{finishedExecutionsCount}}
Selected: {{numSelected}} / <span v-if="finishedExecutionsCountEstimated === true">~</span>{{finishedExecutionsCount}}
<el-button type="danger" title="Delete Selected" icon="el-icon-delete" size="mini" @click="handleDeleteSelected" circle></el-button>
</span>
</div>
@ -142,7 +142,7 @@
</el-table-column>
</el-table>
<div class="load-more" v-if="finishedExecutionsCount > finishedExecutions.length">
<div class="load-more" v-if="finishedExecutionsCount > finishedExecutions.length || finishedExecutionsCountEstimated === true">
<el-button title="Load More" @click="loadMore()" size="small" :disabled="isDataLoading">
<font-awesome-icon icon="sync" /> Load More
</el-button>
@ -200,6 +200,7 @@ export default mixins(
return {
finishedExecutions: [] as IExecutionsSummary[],
finishedExecutionsCount: 0,
finishedExecutionsCountEstimated: false,
checkAll: false,
autoRefresh: true,
@ -256,7 +257,7 @@ export default mixins(
return returnData;
},
combinedExecutionsCount (): number {
return this.activeExecutions.length + this.finishedExecutionsCount;
return 0 + this.activeExecutions.length + this.finishedExecutionsCount;
},
numSelected (): number {
if (this.checkAll === true) {
@ -489,16 +490,19 @@ export default mixins(
}
this.finishedExecutions = this.finishedExecutions.filter(execution => !gaps.includes(parseInt(execution.id, 10)) && lastId >= parseInt(execution.id, 10));
this.finishedExecutionsCount = results[0].count;
this.finishedExecutionsCountEstimated = results[0].estimated;
},
async loadFinishedExecutions (): Promise<void> {
if (this.filter.status === 'running') {
this.finishedExecutions = [];
this.finishedExecutionsCount = 0;
this.finishedExecutionsCountEstimated = false;
return;
}
const data = await this.restApi().getPastExecutions(this.workflowFilterPast, this.requestItemsPerRequest);
this.finishedExecutions = data.results;
this.finishedExecutionsCount = data.count;
this.finishedExecutionsCountEstimated = data.estimated;
},
async loadMore () {
if (this.filter.status === 'running') {
@ -526,6 +530,7 @@ export default mixins(
this.finishedExecutions.push.apply(this.finishedExecutions, data.results);
this.finishedExecutionsCount = data.count;
this.finishedExecutionsCountEstimated = data.estimated;
this.isDataLoading = false;
},


@ -32,7 +32,7 @@
<template slot-scope="scope">
<div :key="scope.row.id">
<span class="name">{{scope.row.name}}</span>
<TagsContainer class="hidden-sm-and-down" :tagIds="getIds(scope.row.tags)" :limit="3" />
<TagsContainer class="hidden-sm-and-down" :tagIds="getIds(scope.row.tags)" :limit="3" @click="onTagClick" :clickable="true" :hoverable="true" />
</div>
</template>
</el-table-column>
@ -124,6 +124,11 @@ export default mixins(
updateTagsFilter(tags: string[]) {
this.filterTagIds = tags;
},
onTagClick(tagId: string) {
if (tagId !== 'count' && !this.filterTagIds.includes(tagId)) {
this.filterTagIds.push(tagId);
}
},
async openWorkflow (data: IWorkflowShortResponse, column: any) { // tslint:disable-line:no-any
if (column.label !== 'Active') {


@ -1,6 +1,6 @@
{
"name": "n8n-node-dev",
"version": "0.15.0",
"version": "0.16.0",
"description": "CLI to simplify n8n credentials/node development",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@ -59,8 +59,8 @@
"change-case": "^4.1.1",
"copyfiles": "^2.1.1",
"inquirer": "^7.0.1",
"n8n-core": "~0.75.0",
"n8n-workflow": "~0.62.0",
"n8n-core": "~0.76.0",
"n8n-workflow": "~0.63.0",
"oauth-1.0a": "^2.2.6",
"replace-in-file": "^6.0.0",
"request": "^2.88.2",


@ -57,5 +57,12 @@ export class MicrosoftSql implements ICredentialType {
default: 15000,
description: 'Connection timeout in ms.',
},
{
displayName: 'Request Timeout',
name: 'requestTimeout',
type: 'number',
default: 15000,
description: 'Request timeout in ms.',
},
];
}


@ -390,7 +390,7 @@ export class Airtable implements INodeType {
},
// ----------------------------------
// append + update
// append + delete + update
// ----------------------------------
{
displayName: 'Options',
@ -401,12 +401,24 @@ export class Airtable implements INodeType {
show: {
operation: [
'append',
'delete',
'update',
],
},
},
default: {},
options: [
{
displayName: 'Bulk Size',
name: 'bulkSize',
type: 'number',
typeOptions: {
minValue: 1,
maxValue: 10,
},
default: 10,
description: `Number of records to process at once.`,
},
{
displayName: 'Ignore Fields',
name: 'ignoreFields',
@ -428,6 +440,14 @@ export class Airtable implements INodeType {
displayName: 'Typecast',
name: 'typecast',
type: 'boolean',
displayOptions: {
show: {
'/operation': [
'append',
'update',
],
},
},
default: false,
description: 'If the Airtable API should attempt mapping of string values for linked records & select options.',
},
@ -465,54 +485,81 @@ export class Airtable implements INodeType {
let fields: string[];
let options: IDataObject;
const rows: IDataObject[] = [];
let bulkSize = 10;
for (let i = 0; i < items.length; i++) {
addAllFields = this.getNodeParameter('addAllFields', i) as boolean;
options = this.getNodeParameter('options', i, {}) as IDataObject;
bulkSize = options.bulkSize as number || bulkSize;
const row: IDataObject = {};
if (addAllFields === true) {
// Add all the fields the item has
body.fields = items[i].json;
row.fields = { ...items[i].json };
// tslint:disable-next-line: no-any
delete (row.fields! as any).id;
} else {
// Add only the specified fields
body.fields = {} as IDataObject;
row.fields = {} as IDataObject;
fields = this.getNodeParameter('fields', i, []) as string[];
for (const fieldName of fields) {
// @ts-ignore
body.fields[fieldName] = items[i].json[fieldName];
row.fields[fieldName] = items[i].json[fieldName];
}
}
if (options.typecast === true) {
body['typecast'] = true;
rows.push(row);
if (rows.length === bulkSize || i === items.length - 1) {
if (options.typecast === true) {
body['typecast'] = true;
}
body['records'] = rows;
responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs);
returnData.push(...responseData.records);
// empty rows
rows.length = 0;
}
responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs);
returnData.push(responseData);
}
} else if (operation === 'delete') {
requestMethod = 'DELETE';
let id: string;
const rows: string[] = [];
const options = this.getNodeParameter('options', 0, {}) as IDataObject;
const bulkSize = options.bulkSize as number || 10;
for (let i = 0; i < items.length; i++) {
let id: string;
id = this.getNodeParameter('id', i) as string;
endpoint = `${application}/${table}`;
rows.push(id);
// Make one request after another. This is slower but makes
// sure that we do not run into the rate limit they have in
// place and so block for 30 seconds. Later some global
// functionality in core should make it easy to make requests
// according to specific rules like not more than 5 requests
// per seconds.
qs.records = [id];
if (rows.length === bulkSize || i === items.length - 1) {
endpoint = `${application}/${table}`;
responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs);
// Make one request after another. This is slower but makes
// sure that we do not run into the rate limit they have in
// place and so block for 30 seconds. Later some global
// functionality in core should make it easy to make requests
// according to specific rules like not more than 5 requests
// per seconds.
qs.records = rows;
returnData.push(...responseData.records);
responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs);
returnData.push(...responseData.records);
// empty rows
rows.length = 0;
}
}
} else if (operation === 'list') {
@ -585,55 +632,69 @@ export class Airtable implements INodeType {
requestMethod = 'PATCH';
let id: string;
let updateAllFields: boolean;
let fields: string[];
let options: IDataObject;
const rows: IDataObject[] = [];
let bulkSize = 10;
for (let i = 0; i < items.length; i++) {
updateAllFields = this.getNodeParameter('updateAllFields', i) as boolean;
options = this.getNodeParameter('options', i, {}) as IDataObject;
bulkSize = options.bulkSize as number || bulkSize;
const row: IDataObject = {};
row.fields = {} as IDataObject;
if (updateAllFields === true) {
// Update all the fields the item has
body.fields = items[i].json;
row.fields = { ...items[i].json };
// remove id field
// tslint:disable-next-line: no-any
delete (row.fields! as any).id;
if (options.ignoreFields && options.ignoreFields !== '') {
const ignoreFields = (options.ignoreFields as string).split(',').map(field => field.trim()).filter(field => !!field);
if (ignoreFields.length) {
// From: https://stackoverflow.com/questions/17781472/how-to-get-a-subset-of-a-javascript-objects-properties
body.fields = Object.entries(items[i].json)
row.fields = Object.entries(items[i].json)
.filter(([key]) => !ignoreFields.includes(key))
.reduce((obj, [key, val]) => Object.assign(obj, { [key]: val }), {});
}
}
} else {
// Update only the specified fields
body.fields = {} as IDataObject;
fields = this.getNodeParameter('fields', i, []) as string[];
for (const fieldName of fields) {
// @ts-ignore
body.fields[fieldName] = items[i].json[fieldName];
row.fields[fieldName] = items[i].json[fieldName];
}
}
id = this.getNodeParameter('id', i) as string;
row.id = this.getNodeParameter('id', i) as string;
endpoint = `${application}/${table}`;
rows.push(row);
// Make one request after another. This is slower but makes
// sure that we do not run into the rate limit they have in
// place and so block for 30 seconds. Later some global
// functionality in core should make it easy to make requests
// according to specific rules like not more than 5 requests
// per seconds.
if (rows.length === bulkSize || i === items.length - 1) {
endpoint = `${application}/${table}`;
const data = { records: [{ id, fields: body.fields }], typecast: (options.typecast) ? true : false };
// Make one request after another. This is slower but makes
// sure that we do not run into the rate limit they have in
// place and so block for 30 seconds. Later some global
// functionality in core should make it easy to make requests
// according to specific rules like not more than 5 requests
// per seconds.
responseData = await apiRequest.call(this, requestMethod, endpoint, data, qs);
const data = { records: rows, typecast: (options.typecast) ? true : false };
returnData.push(...responseData.records);
responseData = await apiRequest.call(this, requestMethod, endpoint, data, qs);
returnData.push(...responseData.records);
// empty rows
rows.length = 0;
}
}
} else {


@ -29,6 +29,10 @@ import {
import * as moment from 'moment-timezone';
import {
noCase,
} from 'change-case';
export class Box implements INodeType {
description: INodeTypeDescription = {
displayName: 'Box',
@ -81,6 +85,7 @@ export class Box implements INodeType {
const length = items.length as unknown as number;
const qs: IDataObject = {};
let responseData;
const timezone = this.getTimezone();
const resource = this.getNodeParameter('resource', 0) as string;
const operation = this.getNodeParameter('operation', 0) as string;
for (let i = 0; i < length; i++) {
@ -199,6 +204,51 @@ export class Box implements INodeType {
}
returnData.push.apply(returnData, responseData as IDataObject[]);
}
// https://developer.box.com/reference/post-collaborations/
if (operation === 'share') {
const fileId = this.getNodeParameter('fileId', i) as string;
const role = this.getNodeParameter('role', i) as string;
const accessibleBy = this.getNodeParameter('accessibleBy', i) as string;
const options = this.getNodeParameter('options', i) as IDataObject;
// tslint:disable-next-line: no-any
const body: { accessible_by: IDataObject, [key: string]: any } = {
accessible_by: {},
item: {
id: fileId,
type: 'file',
},
role: (role === 'coOwner') ? 'co-owner' : noCase(role),
...options,
};
if (body.fields) {
qs.fields = body.fields;
delete body.fields;
}
if (body.expires_at) {
body.expires_at = moment.tz(body.expires_at, timezone).format();
}
if (body.notify) {
qs.notify = body.notify;
delete body.notify;
}
if (accessibleBy === 'user') {
const useEmail = this.getNodeParameter('useEmail', i) as boolean;
if (useEmail) {
body.accessible_by['login'] = this.getNodeParameter('email', i) as string;
} else {
body.accessible_by['id'] = this.getNodeParameter('userId', i) as string;
}
} else {
body.accessible_by['id'] = this.getNodeParameter('groupId', i) as string;
}
responseData = await boxApiRequest.call(this, 'POST', `/collaborations`, body, qs);
returnData.push(responseData as IDataObject);
}
// https://developer.box.com/reference/post-files-content
if (operation === 'upload') {
const parentId = this.getNodeParameter('parentId', i) as string;
@ -356,6 +406,79 @@ export class Box implements INodeType {
}
returnData.push.apply(returnData, responseData as IDataObject[]);
}
// https://developer.box.com/reference/post-collaborations/
if (operation === 'share') {
const folderId = this.getNodeParameter('folderId', i) as string;
const role = this.getNodeParameter('role', i) as string;
const accessibleBy = this.getNodeParameter('accessibleBy', i) as string;
const options = this.getNodeParameter('options', i) as IDataObject;
// tslint:disable-next-line: no-any
const body: { accessible_by: IDataObject, [key: string]: any } = {
accessible_by: {},
item: {
id: folderId,
type: 'folder',
},
role: (role === 'coOwner') ? 'co-owner' : noCase(role),
...options,
};
if (body.fields) {
qs.fields = body.fields;
delete body.fields;
}
if (body.expires_at) {
body.expires_at = moment.tz(body.expires_at, timezone).format();
}
if (body.notify) {
qs.notify = body.notify;
delete body.notify;
}
if (accessibleBy === 'user') {
const useEmail = this.getNodeParameter('useEmail', i) as boolean;
if (useEmail) {
body.accessible_by['login'] = this.getNodeParameter('email', i) as string;
} else {
body.accessible_by['id'] = this.getNodeParameter('userId', i) as string;
}
} else {
body.accessible_by['id'] = this.getNodeParameter('groupId', i) as string;
}
responseData = await boxApiRequest.call(this, 'POST', `/collaborations`, body, qs);
returnData.push(responseData as IDataObject);
}
//https://developer.box.com/guides/folders/single/move/
if (operation === 'update') {
const folderId = this.getNodeParameter('folderId', i) as string;
const updateFields = this.getNodeParameter('updateFields', i) as IDataObject;
if (updateFields.fields) {
qs.fields = updateFields.fields;
delete updateFields.fields;
}
const body = {
...updateFields,
} as IDataObject;
if (body.parentId) {
body.parent = {
id: body.parentId,
};
delete body.parentId;
}
if (body.tags) {
body.tags = (body.tags as string).split(',');
}
responseData = await boxApiRequest.call(this, 'PUT', `/folders/${folderId}`, body, qs);
returnData.push(responseData as IDataObject);
}
}
}
if (resource === 'file' && operation === 'download') {


@ -21,7 +21,7 @@ export class BoxTrigger implements INodeType {
icon: 'file:box.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when a Box events occurs.',
description: 'Starts the workflow when Box events occur',
defaults: {
name: 'Box Trigger',
color: '#00aeef',


@ -40,6 +40,11 @@ export const fileOperations = [
value: 'search',
description: 'Search files',
},
{
name: 'Share',
value: 'share',
description: 'Share a file',
},
{
name: 'Upload',
value: 'upload',
@ -496,6 +501,242 @@ export const fileFields = [
],
},
/* -------------------------------------------------------------------------- */
/* file:share */
/* -------------------------------------------------------------------------- */
{
displayName: 'File ID',
name: 'fileId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
},
},
default: '',
description: 'The ID of the file to share.',
},
{
displayName: 'Accessible By',
name: 'accessibleBy',
type: 'options',
options: [
{
name: 'Group',
value: 'group',
},
{
name: 'User',
value: 'user',
},
],
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
},
},
default: '',
description: 'The type of object the file will be shared with.',
},
{
displayName: 'Use Email',
name: 'useEmail',
type: 'boolean',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
accessibleBy: [
'user',
],
},
},
default: true,
description: 'Whether to identify the user by email or ID.',
},
{
displayName: 'Email',
name: 'email',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
useEmail: [
true,
],
accessibleBy: [
'user',
],
},
},
default: '',
description: `The user's email address to share the file with.`,
},
{
displayName: 'User ID',
name: 'userId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
useEmail: [
false,
],
accessibleBy: [
'user',
],
},
},
default: '',
description: `The user's ID to share the file with.`,
},
{
displayName: 'Group ID',
name: 'groupId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
accessibleBy: [
'group',
],
},
},
default: '',
description: `The group's ID to share the file with.`,
},
{
displayName: 'Role',
name: 'role',
type: 'options',
options: [
{
name: 'Co-Owner',
value: 'coOwner',
description: 'A co-owner has all of the functional read/write access that an editor does',
},
{
name: 'Editor',
value: 'editor',
description: 'An editor has full read/write access to a folder or file',
},
{
name: 'Previewer',
value: 'previewer',
description: 'A previewer has limited read access',
},
{
name: 'Previewer Uploader',
value: 'previewerUploader',
description: 'This access level is a combination of Previewer and Uploader',
},
{
name: 'Uploader',
value: 'uploader',
description: 'An uploader has limited write access',
},
{
name: 'Viewer',
value: 'viewer',
description: 'A viewer has read access to a folder or file',
},
{
name: 'Viewer Uploader',
value: 'viewerUploader',
description: 'This access level is a combination of Viewer and Uploader',
},
],
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
},
},
default: 'editor',
description: 'The level of access granted.',
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'file',
],
},
},
default: {},
options: [
{
displayName: 'Can View Path',
name: 'can_view_path',
type: 'boolean',
default: false,
description: `Whether the invited users can see the entire parent path to the associated folder.</br>
The user will not gain privileges in any parent folder and therefore cannot see content the user is not collaborated on.`,
},
{
displayName: 'Expires At',
name: 'expires_at',
type: 'dateTime',
default: '',
description: 'Set the expiration date for the collaboration. At this date, the collaboration will be automatically removed from the item.',
},
{
displayName: 'Fields',
name: 'fields',
type: 'string',
default: '',
description: 'A comma-separated list of attributes to include in the response. This can be used to request fields that are not normally returned in a standard response.',
},
{
displayName: 'Notify',
name: 'notify',
type: 'boolean',
default: false,
description: 'Whether users should receive an email notification for the action performed.',
},
],
},
/* -------------------------------------------------------------------------- */
/* file:upload */
/* -------------------------------------------------------------------------- */


@ -35,6 +35,16 @@ export const folderOperations = [
value: 'search',
description: 'Search files',
},
{
name: 'Share',
value: 'share',
description: 'Share a folder',
},
{
name: 'Update',
value: 'update',
description: 'Update folder',
},
],
default: 'create',
description: 'The operation to perform.',
@ -147,6 +157,7 @@ export const folderFields = [
default: '',
description: 'Folder ID',
},
/* -------------------------------------------------------------------------- */
/* folder:delete */
/* -------------------------------------------------------------------------- */
@ -441,4 +452,417 @@ export const folderFields = [
},
],
},
/* -------------------------------------------------------------------------- */
/* folder:share */
/* -------------------------------------------------------------------------- */
{
displayName: 'Folder ID',
name: 'folderId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
},
},
default: '',
description: 'The ID of the folder to share.',
},
{
displayName: 'Accessible By',
name: 'accessibleBy',
type: 'options',
options: [
{
name: 'User',
value: 'user',
},
{
name: 'Group',
value: 'group',
},
],
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
},
},
default: 'user',
description: 'The type of object the folder will be shared with.',
},
{
displayName: 'Use Email',
name: 'useEmail',
type: 'boolean',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
accessibleBy: [
'user',
],
},
},
default: true,
description: 'Whether to identify the user by email or ID.',
},
{
displayName: 'Email',
name: 'email',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
accessibleBy: [
'user',
],
useEmail: [
true,
],
},
},
default: '',
description: `The user's email address to share the folder with.`,
},
{
displayName: 'User ID',
name: 'userId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
accessibleBy: [
'user',
],
useEmail: [
false,
],
},
},
default: '',
description: `The user's ID to share the folder with.`,
},
{
displayName: 'Group ID',
name: 'groupId',
type: 'string',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
accessibleBy: [
'group',
],
},
},
default: '',
description: `The group's ID to share the folder with.`,
},
{
displayName: 'Role',
name: 'role',
type: 'options',
options: [
{
name: 'Co-Owner',
value: 'coOwner',
description: 'A co-owner has all of the functional read/write access that an editor does',
},
{
name: 'Editor',
value: 'editor',
description: 'An editor has full read/write access to a folder or file',
},
{
name: 'Previewer',
value: 'previewer',
description: 'A previewer has limited read access',
},
{
name: 'Previewer Uploader',
value: 'previewerUploader',
description: 'This access level is a combination of Previewer and Uploader',
},
{
name: 'Uploader',
value: 'uploader',
description: 'An uploader has limited write access',
},
{
name: 'Viewer',
value: 'viewer',
description: 'A viewer has read access to a folder or file',
},
{
name: 'Viewer Uploader',
value: 'viewerUploader',
description: 'This access level is a combination of Viewer and Uploader',
},
],
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
},
},
default: 'editor',
description: 'The level of access granted.',
},
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
displayOptions: {
show: {
operation: [
'share',
],
resource: [
'folder',
],
},
},
default: {},
options: [
{
displayName: 'Can View Path',
name: 'can_view_path',
type: 'boolean',
default: false,
description: `Whether the invited users can see the entire parent path to the associated folder.<br/>
The user will not gain privileges in any parent folder and therefore cannot see content they are not collaborating on.`,
},
{
displayName: 'Expires At',
name: 'expires_at',
type: 'dateTime',
default: '',
description: 'Set the expiration date for the collaboration. At this date, the collaboration will be automatically removed from the item.',
},
{
displayName: 'Fields',
name: 'fields',
type: 'string',
default: '',
description: 'A comma-separated list of attributes to include in the response. This can be used to request fields that are not normally returned in a standard response.',
},
{
displayName: 'Notify',
name: 'notify',
type: 'boolean',
default: false,
description: 'Whether users should receive an email notification for the action performed.',
},
],
},
/* -------------------------------------------------------------------------- */
/* folder:update */
/* -------------------------------------------------------------------------- */
{
displayName: 'Folder ID',
name: 'folderId',
required: true,
type: 'string',
displayOptions: {
show: {
operation: [
'update',
],
resource: [
'folder',
],
},
},
default: '',
description: 'Folder ID',
},
{
displayName: 'Update Fields',
name: 'updateFields',
type: 'collection',
displayOptions: {
show: {
operation: [
'update',
],
resource: [
'folder',
],
},
},
default: {},
placeholder: 'Add Field',
options: [
{
displayName: 'Can Non-Owners Invite',
name: 'can_non_owners_invite',
type: 'boolean',
default: false,
description: 'Specifies if users who are not the owner of the folder can invite new collaborators to the folder.',
},
{
displayName: 'Can Non-Owners View Collaborators',
name: 'can_non_owners_view_collaborators',
type: 'boolean',
default: false,
description: 'Restricts collaborators who are not the owner of this folder from viewing other collaborations on this folder.',
},
{
displayName: 'Description',
name: 'description',
type: 'string',
default: '',
description: 'The optional description of this folder.',
},
{
displayName: 'Fields',
name: 'fields',
type: 'string',
default: '',
description: 'A comma-separated list of attributes to include in the response. This can be used to request fields that are not normally returned in a standard response.',
},
{
displayName: 'Is Collaboration Restricted To Enterprise',
name: 'is_collaboration_restricted_to_enterprise',
type: 'boolean',
default: false,
description: 'Specifies if new invites to this folder are restricted to users within the enterprise. This does not affect existing collaborations.',
},
{
displayName: 'Name',
name: 'name',
type: 'string',
default: '',
description: 'The optional new name for this folder.',
},
{
displayName: 'Parent ID',
name: 'parentId',
type: 'string',
default: '',
description: 'The parent folder for this folder. Use this to move the folder or to restore it out of the trash.',
},
{
displayName: 'Shared Link',
name: 'shared_link',
type: 'collection',
typeOptions: {
multipleValues: false,
},
description: 'Share link information.',
placeholder: 'Add Shared Link Config',
default: {},
options: [
{
displayName: 'Access',
name: 'access',
type: 'options',
options: [
{
name: 'Collaborators',
value: 'collaborators',
description: 'Only those who have been invited to the folder',
},
{
name: 'Company',
value: 'company',
description: 'Only people within the company',
},
{
name: 'Open',
value: 'open',
description: 'Anyone with the link',
},
],
default: 'open',
},
{
displayName: 'Password',
name: 'password',
type: 'string',
displayOptions: {
show: {
access: [
'open',
],
},
},
default: '',
description: 'The password required to access the shared link. Set the password to null to remove it.',
},
{
displayName: 'Permissions',
name: 'permissions',
type: 'collection',
placeholder: 'Add Permission',
default: {},
options: [
{
displayName: 'Can Download',
name: 'can_download',
type: 'boolean',
default: false,
description: 'Whether the shared link allows for downloading of files.',
},
{
displayName: 'Unshared At',
name: 'unshared_at',
type: 'dateTime',
default: '',
description: 'The timestamp at which this shared link will expire.',
},
{
displayName: 'Vanity Name',
name: 'vanity_name',
type: 'string',
default: '',
description: 'Defines a custom vanity name to use in the shared link URL, for example https://app.box.com/v/my-shared-link.',
},
],
},
{
displayName: 'Tags',
name: 'tags',
type: 'string',
default: '',
description: 'The tags for this item. These tags are shown in the Box web app and mobile apps next to an item.',
},
],
},
],
},
] as INodeProperties[];
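
The share parameters defined above ultimately feed a Box "create collaboration" request. The sketch below is a hypothetical mapping based on Box's public collaborations API, not on the node's execute() code (which is not part of this diff); the ShareParams interface and buildCollaborationBody helper are invented for illustration.

interface ShareParams {
	resource: 'file' | 'folder';
	itemId: string;                  // fileId or folderId from the UI
	accessibleBy: 'user' | 'group';
	useEmail?: boolean;
	email?: string;
	userId?: string;
	groupId?: string;
	role: string;                    // e.g. 'editor', 'viewer', 'coOwner'
}

// Assumed request body shape for POST /2.0/collaborations (Box public API).
function buildCollaborationBody(p: ShareParams) {
	const accessible_by =
		p.accessibleBy === 'group'
			? { type: 'group', id: p.groupId }
			: p.useEmail
				? { type: 'user', login: p.email }
				: { type: 'user', id: p.userId };

	return {
		item: { type: p.resource, id: p.itemId },
		accessible_by,
		// Box expects lower-case, space-separated roles such as 'co-owner' or
		// 'previewer uploader'; camelCase values like 'coOwner' would need to
		// be converted before sending.
		role: p.role,
	};
}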

View file

@ -20,7 +20,7 @@ export class CalendlyTrigger implements INodeType {
icon: 'file:calendly.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Calendly events occur.',
description: 'Starts the workflow when Calendly events occur',
defaults: {
name: 'Calendly Trigger',
color: '#374252',

View file

@ -17,7 +17,7 @@ export class ChargebeeTrigger implements INodeType {
icon: 'file:chargebee.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Chargebee events occur.',
description: 'Starts the workflow when Chargebee events occur',
defaults: {
name: 'Chargebee Trigger',
color: '#559922',

View file

@ -26,7 +26,7 @@ export class ClockifyTrigger implements INodeType {
name: 'clockifyTrigger',
group: [ 'trigger' ],
version: 1,
description: 'Watches Clockify For Events',
description: 'Listens to Clockify events',
defaults: {
name: 'Clockify Trigger',
color: '#000000',

View file

@ -48,7 +48,7 @@ export class ConvertKit implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume ConvertKit API.',
description: 'Consume ConvertKit API',
defaults: {
name: 'ConvertKit',
color: '#fb6970',

View file

@ -25,7 +25,7 @@ export class CrateDb implements INodeType {
icon: 'file:cratedb.png',
group: ['input'],
version: 1,
description: 'Add and update data in CrateDB.',
description: 'Add and update data in CrateDB',
defaults: {
name: 'CrateDB',
color: '#47889f',

View file

@ -31,7 +31,7 @@ export class CustomerIoTrigger implements INodeType {
group: ['trigger'],
icon: 'file:customerio.svg',
version: 1,
description: 'Starts the workflow on a Customer.io update. (Beta)',
description: 'Starts the workflow on a Customer.io update (Beta)',
defaults: {
name: 'Customer.io Trigger',
color: '#ffcd00',

View file

@ -55,7 +55,7 @@ export class Discourse implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Discourse API.',
description: 'Consume Discourse API',
defaults: {
name: 'Discourse',
color: '#000000',

View file

@ -35,7 +35,7 @@ export class EmailReadImap implements INodeType {
icon: 'fa:inbox',
group: ['trigger'],
version: 1,
description: 'Triggers the workflow when a new email gets received',
description: 'Triggers the workflow when a new email is received',
defaults: {
name: 'IMAP Email',
color: '#44AA22',

View file

@ -51,7 +51,7 @@ export class ExecuteCommand implements INodeType {
icon: 'fa:terminal',
group: ['transform'],
version: 1,
description: 'Executes a command on the host.',
description: 'Executes a command on the host',
defaults: {
name: 'Execute Command',
color: '#886644',

View file

@ -33,7 +33,7 @@ export class FacebookTrigger implements INodeType {
group: ['trigger'],
version: 1,
subtitle: '={{$parameter["appId"] +"/"+ $parameter["object"]}}',
description: 'Starts the workflow when a Facebook events occurs.',
description: 'Starts the workflow when Facebook events occur',
defaults: {
name: 'Facebook Trigger',
color: '#3B5998',

View file

@ -30,7 +30,7 @@ export class FileMaker implements INodeType {
icon: 'file:filemaker.png',
group: ['input'],
version: 1,
description: 'Retrieve data from FileMaker data API.',
description: 'Retrieve data from the FileMaker data API',
defaults: {
name: 'FileMaker',
color: '#665533',

View file

@ -45,7 +45,7 @@ export class Ftp implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["protocol"] + ": " + $parameter["operation"]}}',
description: 'Transfers files via FTP or SFTP.',
description: 'Transfers files via FTP or SFTP',
defaults: {
name: 'FTP',
color: '#303050',

View file

@ -15,7 +15,7 @@ export class Function implements INodeType {
icon: 'fa:code',
group: ['transform'],
version: 1,
description: 'Run custom function code which gets executed once and allows to add, remove, change and replace items.',
description: 'Run custom function code which gets executed once and allows you to add, remove, change and replace items',
defaults: {
name: 'Function',
color: '#FF9922',

View file

@ -17,7 +17,7 @@ export class FunctionItem implements INodeType {
icon: 'fa:code',
group: ['transform'],
version: 1,
description: 'Run custom function code which gets executed once per item.',
description: 'Run custom function code which gets executed once per item',
defaults: {
name: 'FunctionItem',
color: '#ddbb33',

View file

@ -31,7 +31,7 @@ export class GetResponse implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume GetResponse API.',
description: 'Consume GetResponse API',
defaults: {
name: 'GetResponse',
color: '#00afec',

View file

@ -26,7 +26,7 @@ export class GetResponseTrigger implements INodeType {
icon: 'file:getResponse.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when GetResponse events occur.',
description: 'Starts the workflow when GetResponse events occur',
defaults: {
name: 'GetResponse Trigger',
color: '#00afec',

View file

@ -33,7 +33,7 @@ export class Ghost implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Ghost API.',
description: 'Consume Ghost API',
defaults: {
name: 'Ghost',
color: '#15212a',

View file

@ -28,7 +28,7 @@ export class Github implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume GitHub API.',
description: 'Consume GitHub API',
defaults: {
name: 'GitHub',
color: '#000000',

View file

@ -25,7 +25,7 @@ export class GithubTrigger implements INodeType {
group: ['trigger'],
version: 1,
subtitle: '={{$parameter["owner"] + "/" + $parameter["repository"] + ": " + $parameter["events"].join(", ")}}',
description: 'Starts the workflow when a Github events occurs.',
description: 'Starts the workflow when Github events occur',
defaults: {
name: 'Github Trigger',
color: '#000000',

View file

@ -23,7 +23,7 @@ export class Gitlab implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Retrieve data from GitLab API.',
description: 'Retrieve data from GitLab API',
defaults: {
name: 'Gitlab',
color: '#FC6D27',

View file

@ -24,7 +24,7 @@ export class GitlabTrigger implements INodeType {
group: ['trigger'],
version: 1,
subtitle: '={{$parameter["owner"] + "/" + $parameter["repository"] + ": " + $parameter["events"].join(", ")}}',
description: 'Starts the workflow when a GitLab event occurs.',
description: 'Starts the workflow when GitLab events occur',
defaults: {
name: 'Gitlab Trigger',
color: '#FC6D27',

View file

@ -32,7 +32,7 @@ export class GoogleBigQuery implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Google BigQuery API.',
description: 'Consume Google BigQuery API',
defaults: {
name: 'Google BigQuery',
color: '#3E87E4',

View file

@ -44,7 +44,7 @@ export class GoogleCalendar implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Google Calendar API.',
description: 'Consume Google Calendar API',
defaults: {
name: 'Google Calendar',
color: '#3E87E4',

View file

@ -33,7 +33,7 @@ export class GoogleContacts implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Google Contacts API.',
description: 'Consume Google Contacts API',
defaults: {
name: 'Google Contacts',
color: '#1a73e8',

View file

@ -24,7 +24,7 @@
// group: ['trigger'],
// version: 1,
// subtitle: '={{$parameter["owner"] + "/" + $parameter["repository"] + ": " + $parameter["events"].join(", ")}}',
// description: 'Starts the workflow when a file on Google Drive got changed.',
// description: 'Starts the workflow when a file on Google Drive is changed',
// defaults: {
// name: 'Google Drive Trigger',
// color: '#3f87f2',

View file

@ -486,8 +486,9 @@ export class GoogleSheet {
inputData.forEach((item) => {
rowData = [];
keyColumnOrder.forEach((key) => {
if (item.hasOwnProperty(key) && item[key]) {
rowData.push(item[key]!.toString());
const data = item[key];
if (item.hasOwnProperty(key) && data !== null && typeof data !== 'undefined') {
rowData.push(data.toString());
} else {
rowData.push('');
}
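
The point of this change is that the old truthiness check silently dropped legitimate falsy cell values such as 0 or false. A standalone sketch of the behavioural difference (not the node's actual code):

const item: { [key: string]: string | number | boolean | null | undefined } = {
	name: 'Alice',
	score: 0,
	active: false,
};
const keyColumnOrder = ['name', 'score', 'active', 'missing'];

// Old check: falsy values are lost -> ['Alice', '', '', '']
const oldRow = keyColumnOrder.map((key) => (item[key] ? item[key]!.toString() : ''));

// New check: only null/undefined become '' -> ['Alice', '0', 'false', '']
const newRow = keyColumnOrder.map((key) => {
	const data = item[key];
	return data !== null && typeof data !== 'undefined' ? data.toString() : '';
});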

View file

@ -29,7 +29,7 @@ export class GoogleTasks implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Google Tasks API.',
description: 'Consume Google Tasks API',
defaults: {
name: 'Google Tasks',
color: '#3E87E4',

View file

@ -55,7 +55,7 @@ export class YouTube implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume YouTube API.',
description: 'Consume YouTube API',
defaults: {
name: 'YouTube',
color: '#FF0000',

View file

@ -22,7 +22,7 @@ export class Gotify implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Gotify API.',
description: 'Consume Gotify API',
defaults: {
name: 'Gotify',
color: '#71c8ec',

View file

@ -64,7 +64,7 @@ export class HelpScout implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Help Scout API.',
description: 'Consume HelpScout API',
defaults: {
name: 'HelpScout',
color: '#1392ee',

View file

@ -26,7 +26,7 @@ export class HelpScoutTrigger implements INodeType {
icon: 'file:helpScout.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when HelpScout events occur.',
description: 'Starts the workflow when HelpScout events occur',
defaults: {
name: 'HelpScout Trigger',
color: '#1392ee',

View file

@ -32,7 +32,7 @@ export class HttpRequest implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["requestMethod"] + ": " + $parameter["url"]}}',
description: 'Makes a HTTP request and returns the received data',
description: 'Makes an HTTP request and returns the response data',
defaults: {
name: 'HTTP Request',
color: '#2200DD',

View file

@ -37,7 +37,7 @@ export class HubspotTrigger implements INodeType {
icon: 'file:hubspot.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when HubSpot events occur.',
description: 'Starts the workflow when HubSpot events occur',
defaults: {
name: 'Hubspot Trigger',
color: '#ff7f64',

View file

@ -16,7 +16,7 @@ export class If implements INodeType {
icon: 'fa:map-signs',
group: ['transform'],
version: 1,
description: 'Splits a stream depending on defined compare operations.',
description: 'Splits a stream based on comparisons',
defaults: {
name: 'IF',
color: '#408000',

View file

@ -47,7 +47,7 @@ export class Intercom implements INodeType {
group: ['output'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume intercom API',
description: 'Consume Intercom API',
defaults: {
name: 'Intercom',
color: '#0575f3',

View file

@ -20,7 +20,7 @@ export class InvoiceNinjaTrigger implements INodeType {
icon: 'file:invoiceNinja.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Invoice Ninja events occur.',
description: 'Starts the workflow when Invoice Ninja events occur',
defaults: {
name: 'Invoice Ninja Trigger',
color: '#000000',

View file

@ -42,7 +42,7 @@ export class Iterable implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Iterable API.',
description: 'Consume Iterable API',
defaults: {
name: 'Iterable',
color: '#725ed8',

View file

@ -26,7 +26,7 @@ export class JiraTrigger implements INodeType {
icon: 'file:jira.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Jira events occurs.',
description: 'Starts the workflow when Jira events occur',
defaults: {
name: 'Jira Trigger',
color: '#4185f7',

View file

@ -110,7 +110,7 @@ export class Keap implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Keap API.',
description: 'Consume Keap API',
defaults: {
name: 'Keap',
color: '#79af53',

View file

@ -28,7 +28,7 @@ export class KeapTrigger implements INodeType {
group: ['trigger'],
version: 1,
subtitle: '={{$parameter["eventId"]}}',
description: 'Starts the workflow when Infusionsoft events occur.',
description: 'Starts the workflow when Infusionsoft events occur',
defaults: {
name: 'Keap Trigger',
color: '#79af53',

View file

@ -29,7 +29,7 @@ export class Line implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Line API.',
description: 'Consume Line API',
defaults: {
name: 'Line',
color: '#00b900',

View file

@ -24,7 +24,7 @@ export class LinkedIn implements INodeType {
icon: 'file:linkedin.png',
group: ['input'],
version: 1,
description: 'Consume LinkedIn Api',
description: 'Consume LinkedIn API',
defaults: {
name: 'LinkedIn',
color: '#0075b4',

View file

@ -29,7 +29,7 @@ export class MailerLite implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Mailer Lite API.',
description: 'Consume Mailer Lite API',
defaults: {
name: 'MailerLite',
color: '#58be72',

View file

@ -21,7 +21,7 @@ export class MailerLiteTrigger implements INodeType {
icon: 'file:mailerLite.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when a MailerLite events occurs.',
description: 'Starts the workflow when MailerLite events occur',
defaults: {
name: 'MailerLite Trigger',
color: '#58be72',

View file

@ -19,7 +19,7 @@ export class Mailgun implements INodeType {
icon: 'file:mailgun.svg',
group: ['output'],
version: 1,
description: 'Sends an Email via Mailgun',
description: 'Sends an email via Mailgun',
defaults: {
name: 'Mailgun',
color: '#c02428',

View file

@ -105,7 +105,7 @@ export class Mandrill implements INodeType {
group: ['output'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume mandrill API',
description: 'Consume Mandrill API',
defaults: {
name: 'Mandrill',
color: '#c02428',

View file

@ -17,7 +17,7 @@ export class Merge implements INodeType {
group: ['transform'],
version: 1,
subtitle: '={{$parameter["mode"]}}',
description: 'Merges data of multiple streams once data of both is available',
description: 'Merges data of multiple streams once data from both is available',
defaults: {
name: 'Merge',
color: '#00bbcc',

View file

@ -22,7 +22,7 @@ export class MessageBird implements INodeType {
group: ['output'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Sending SMS',
description: 'Sends SMS via MessageBird',
defaults: {
name: 'MessageBird',
color: '#2481d7',

View file

@ -42,7 +42,7 @@ export class MicrosoftExcel implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Microsoft Excel API.',
description: 'Consume Microsoft Excel API',
defaults: {
name: 'Microsoft Excel',
color: '#1c6d40',

View file

@ -36,7 +36,7 @@ export class MicrosoftOneDrive implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Microsoft OneDrive API.',
description: 'Consume Microsoft OneDrive API',
defaults: {
name: 'Microsoft OneDrive',
color: '#1d4bab',

View file

@ -121,7 +121,7 @@ export function extractUpdateSet(item: IDataObject, columns: string[]): string {
return columns
.map(
column =>
`${column} = ${
`"${column}" = ${
typeof item[column] === 'string' ? `'${item[column]}'` : item[column]
}`,
)
@ -153,3 +153,9 @@ export function extractDeleteValues(items: IDataObject[], key: string): string {
.map(item => (typeof item[key] === 'string' ? `'${item[key]}'` : item[key]))
.join(',')})`;
}
export function formatColumns(columns: string) {
return columns.split(',')
.map((column) => (`"${column.trim()}"`)).join(',');
}
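
The new formatColumns helper trims each comma-separated column name and wraps it in double quotes, so identifiers containing spaces or reserved words stay valid in the generated T-SQL. For example:

// 'id, first name,order' -> '"id","first name","order"'
formatColumns('id, first name,order');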

View file

@ -29,6 +29,7 @@ import {
extractUpdateCondition,
extractUpdateSet,
extractValues,
formatColumns,
} from './GenericFunctions';
export class MicrosoftSql implements INodeType {
@ -38,7 +39,7 @@ export class MicrosoftSql implements INodeType {
icon: 'file:mssql.svg',
group: ['input'],
version: 1,
description: 'Gets, add and update data in Microsoft SQL.',
description: 'Get, add and update data in Microsoft SQL',
defaults: {
name: 'Microsoft SQL',
color: '#bcbcbd',
@ -226,7 +227,8 @@ export class MicrosoftSql implements INodeType {
user: credentials.user as string,
password: credentials.password as string,
domain: credentials.domain ? (credentials.domain as string) : undefined,
connectTimeout: credentials.connectTimeout as number,
connectionTimeout: credentials.connectTimeout as number,
requestTimeout: credentials.requestTimeout as number,
options: {
encrypt: credentials.tls as boolean,
enableArithAbort: false,
@ -281,7 +283,7 @@ export class MicrosoftSql implements INodeType {
return pool
.request()
.query(
`INSERT INTO ${table}(${columnString}) VALUES ${values};`,
`INSERT INTO ${table}(${formatColumns(columnString)}) VALUES ${values};`,
);
});
},
@ -364,7 +366,7 @@ export class MicrosoftSql implements INodeType {
return pool
.request()
.query(
`DELETE FROM ${table} WHERE ${deleteKey} IN ${extractDeleteValues(
`DELETE FROM ${table} WHERE "${deleteKey}" IN ${extractDeleteValues(
deleteValues,
deleteKey,
)};`,
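
With the quoting applied, the generated statements wrap identifiers in double quotes, so columns named after reserved words or containing spaces no longer break the query. A rough illustration, with the table name and values invented for the example:

// -> INSERT INTO users("id","first name","order") VALUES (1, 'Alice', 2);
const insertSql = `INSERT INTO users(${formatColumns('id, first name,order')}) VALUES (1, 'Alice', 2);`;

// -> DELETE FROM users WHERE "order" IN (2, 3);
const deleteSql = `DELETE FROM users WHERE "order" IN (2, 3);`;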

View file

@ -26,7 +26,7 @@ export class Mindee implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Mindee API.',
description: 'Consume Mindee API',
defaults: {
name: 'Mindee',
color: '#e94950',

View file

@ -17,7 +17,7 @@ export class Mocean implements INodeType {
icon: 'file:mocean.png',
group: ['transform'],
version: 1,
description: 'Send SMS & voice messages via Mocean (https://moceanapi.com)',
description: 'Send SMS and voice messages via Mocean',
defaults: {
name: 'Mocean',
color: '#772244',

View file

@ -11,7 +11,7 @@ export const nodeDescription: INodeTypeDescription = {
icon: 'file:mongodb.svg',
group: ['input'],
version: 1,
description: 'Find, insert and update documents in MongoDB.',
description: 'Find, insert and update documents in MongoDB',
defaults: {
name: 'MongoDB',
color: '#13AA52',

View file

@ -49,7 +49,7 @@ export class MoveBinaryData implements INodeType {
group: ['transform'],
version: 1,
subtitle: '={{$parameter["mode"]==="binaryToJson" ? "Binary to JSON" : "JSON to Binary"}}',
description: 'Move data between binary and JSON properties.',
description: 'Move data between binary and JSON properties',
defaults: {
name: 'Move Binary Data',
color: '#7722CC',

View file

@ -20,7 +20,7 @@ export class Msg91 implements INodeType {
group: ['transform'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Send Transactional SMS',
description: 'Sends transactional SMS via MSG91',
defaults: {
name: 'Msg91',
color: '#0000ff',

View file

@ -18,7 +18,7 @@ export class MySql implements INodeType {
icon: 'file:mysql.svg',
group: ['input'],
version: 1,
description: 'Get, add and update data in MySQL.',
description: 'Get, add and update data in MySQL',
defaults: {
name: 'MySQL',
color: '#4279a2',

View file

@ -147,7 +147,7 @@ export class N8nTrainingCustomerDatastore implements INodeType {
responseData = data;
} else {
const limit = this.getNodeParameter('limit', i) as number;
responseData = data.splice(0, limit);
responseData = data.slice(0, limit);
}
}
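
splice removes the returned elements from the source array, so if the dataset is shared across items or executions it shrinks on every limited call; slice returns a copy and leaves the source untouched. A standalone illustration:

const data = [1, 2, 3, 4, 5];
data.splice(0, 2);  // returns [1, 2] but mutates: data is now [3, 4, 5]

const fresh = [1, 2, 3, 4, 5];
fresh.slice(0, 2);  // returns [1, 2] and leaves fresh as [1, 2, 3, 4, 5]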

View file

@ -25,7 +25,7 @@ export class Nasa implements INodeType {
group: ['transform'],
version: 1,
subtitle: '={{$parameter["operation"] + ":" + $parameter["resource"]}}',
description: 'Retrieve data the from NASA API',
description: 'Retrieve data from the NASA API',
defaults: {
name: 'NASA',
color: '#0B3D91',

View file

@ -19,7 +19,7 @@ export class OpenWeatherMap implements INodeType {
icon: 'fa:sun',
group: ['input'],
version: 1,
description: 'Gets current and future weather information.',
description: 'Gets current and future weather information',
defaults: {
name: 'OpenWeatherMap',
color: '#554455',

View file

@ -33,7 +33,7 @@ export class Phantombuster implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Phantombuster API.',
description: 'Consume Phantombuster API',
defaults: {
name: 'Phantombuster',
color: '#62bfd7',

View file

@ -29,7 +29,7 @@ export class PhilipsHue implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Philips Hue API.',
description: 'Consume Philips Hue API',
defaults: {
name: 'Philips Hue',
color: '#063c9a',

View file

@ -43,7 +43,7 @@ export class PipedriveTrigger implements INodeType {
icon: 'file:pipedrive.svg',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Pipedrive events occur.',
description: 'Starts the workflow when Pipedrive events occur',
defaults: {
name: 'Pipedrive Trigger',
color: '#559922',

View file

@ -47,7 +47,7 @@ export class PostHog implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume PostHog API.',
description: 'Consume PostHog API',
defaults: {
name: 'PostHog',
color: '#000000',

View file

@ -18,7 +18,7 @@ export class Postgres implements INodeType {
icon: 'file:postgres.svg',
group: ['input'],
version: 1,
description: 'Gets, add and update data in Postgres.',
description: 'Get, add and update data in Postgres',
defaults: {
name: 'Postgres',
color: '#336791',

View file

@ -22,7 +22,7 @@ export class PostmarkTrigger implements INodeType {
icon: 'file:postmark.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Postmark events occur.',
description: 'Starts the workflow when Postmark events occur',
defaults: {
name: 'Postmark Trigger',
color: '#fedd00',

View file

@ -29,7 +29,7 @@ export class Pushbullet implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Pushbullet API.',
description: 'Consume Pushbullet API',
defaults: {
name: 'Pushbullet',
color: '#457854',

View file

@ -23,7 +23,7 @@ export class Pushcut implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Pushcut API.',
description: 'Consume Pushcut API',
defaults: {
name: 'Pushcut',
color: '#1f2957',

View file

@ -21,7 +21,7 @@ export class PushcutTrigger implements INodeType {
icon: 'file:pushcut.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when a Pushcut events occurs.',
description: 'Starts the workflow when Pushcut events occur',
defaults: {
name: 'Pushcut Trigger',
color: '#1f2957',

View file

@ -27,7 +27,7 @@ export class Pushover implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Consume Pushover API.',
description: 'Consume Pushover API',
defaults: {
name: 'Pushover',
color: '#4b9cea',

View file

@ -21,7 +21,7 @@ export class QuestDb implements INodeType {
icon: 'file:questdb.png',
group: ['input'],
version: 1,
description: 'Gets, add and update data in QuestDB.',
description: 'Get, add and update data in QuestDB',
defaults: {
name: 'QuestDB',
color: '#2C4A79',

View file

@ -46,7 +46,7 @@ export class QuickBase implements INodeType {
group: ['input'],
version: 1,
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Integrate with the Quick Base RESTful API.',
description: 'Integrate with the Quick Base RESTful API',
defaults: {
name: 'Quick Base',
color: '#73489d',

View file

@ -20,7 +20,7 @@ export class Redis implements INodeType {
icon: 'file:redis.svg',
group: ['input'],
version: 1,
description: 'Get, send and update data in Redis.',
description: 'Get, send and update data in Redis',
defaults: {
name: 'Redis',
color: '#0033AA',

View file

@ -23,7 +23,7 @@ export class RenameKeys implements INodeType {
icon: 'fa:edit',
group: ['transform'],
version: 1,
description: 'Renames keys.',
description: 'Renames keys',
defaults: {
name: 'Rename Keys',
color: '#772244',

Some files were not shown because too many files have changed in this diff.