Merge master
This commit is contained in:
commit 58fc6ee338
@@ -13,7 +13,9 @@
"start:windows": "cd packages/cli/bin && n8n",
"test": "lerna run test",
"tslint": "lerna exec npm run tslint",
"watch": "lerna run --parallel watch"
"watch": "lerna run --parallel watch",
"webhook": "./packages/cli/bin/n8n webhook",
"worker": "./packages/cli/bin/n8n worker"
},
"devDependencies": {
"lerna": "^3.13.1",
@@ -2,6 +2,15 @@

This list shows all the versions which include breaking changes and how to upgrade.

## 0.118.0

### What changed?

In the Postgres, CrateDB, QuestDB and TimescaleDB nodes, the `Execute Query` operation returns the results from all executed queries instead of just one of them.

### When is action necessary?

If you use any of the above-mentioned nodes with the `Execute Query` operation and the result is relevant to you, you are encouraged to revisit your logic. The node output may now contain more information than before. This change was made so that the behavior is more consistent across n8n, where input with multiple rows should yield results according to all input data instead of only one. Please note: n8n was already running multiple queries based on the input; only the output was changed.
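For illustration, a minimal sketch of the new output shape (the column name and row values below are assumptions, not actual node output):

```typescript
// Illustrative only — assumes two input items, each executing one `SELECT COUNT(*)` query.

// Before 0.118.0: only one query's result was returned.
const before = [
	{ json: { count: 10 } },
];

// From 0.118.0: the results of all executed queries are returned.
const after = [
	{ json: { count: 10 } },
	{ json: { count: 25 } },
];
```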

## 0.117.0

### What changed?
@@ -24,8 +24,8 @@ if (process.argv.length === 2) {

var nodeVersion = process.versions.node.split('.');

if (parseInt(nodeVersion[0], 10) < 12 || parseInt(nodeVersion[0], 10) === 12 && parseInt(nodeVersion[1], 10) < 9) {
console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 12.9 or later!\n`);
if (parseInt(nodeVersion[0], 10) < 14) {
console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 14 or later!\n`);
process.exit(0);
}
@@ -13,7 +13,6 @@ import {
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
IWorkflowBase,
IWorkflowExecutionDataProcess,
LoadNodesAndCredentials,

@@ -23,6 +22,13 @@ import {
WorkflowRunner,
} from '../src';

import {
getLogger,
} from '../src/Logger';

import {
LoggerProxy,
} from 'n8n-workflow';

export class Execute extends Command {
static description = '\nExecutes a given workflow';

@@ -47,6 +53,9 @@ export class Execute extends Command {

async run() {
const logger = getLogger();
LoggerProxy.init(logger);

const { flags } = this.parse(Execute);

// Start directly with the init of the database to improve startup time

@@ -57,12 +66,12 @@ export class Execute extends Command {
const loadNodesAndCredentialsPromise = loadNodesAndCredentials.init();

if (!flags.id && !flags.file) {
GenericHelpers.logOutput(`Either option "--id" or "--file" have to be set!`);
console.info(`Either option "--id" or "--file" have to be set!`);
return;
}

if (flags.id && flags.file) {
GenericHelpers.logOutput(`Either "id" or "file" can be set never both!`);
console.info(`Either "id" or "file" can be set never both!`);
return;
}

@@ -74,7 +83,7 @@ export class Execute extends Command {
workflowData = JSON.parse(await fs.readFile(flags.file, 'utf8'));
} catch (error) {
if (error.code === 'ENOENT') {
GenericHelpers.logOutput(`The file "${flags.file}" could not be found.`);
console.info(`The file "${flags.file}" could not be found.`);
return;
}

@@ -84,7 +93,7 @@ export class Execute extends Command {
// Do a basic check if the data in the file looks right
// TODO: Later check with the help of TypeScript data if it is valid or not
if (workflowData === undefined || workflowData.nodes === undefined || workflowData.connections === undefined) {
GenericHelpers.logOutput(`The file "${flags.file}" does not contain valid workflow data.`);
console.info(`The file "${flags.file}" does not contain valid workflow data.`);
return;
}
workflowId = workflowData.id!.toString();

@@ -98,8 +107,8 @@ export class Execute extends Command {
workflowId = flags.id;
workflowData = await Db.collections!.Workflow!.findOne(workflowId);
if (workflowData === undefined) {
GenericHelpers.logOutput(`The workflow with the id "${workflowId}" does not exist.`);
return;
console.info(`The workflow with the id "${workflowId}" does not exist.`);
process.exit(1);
}
}

@@ -141,7 +150,7 @@ export class Execute extends Command {
if (startNode === undefined) {
// If the workflow does not contain a start-node we can not know what
// should be executed and with which data to start.
GenericHelpers.logOutput(`The workflow does not contain a "Start" node. So it can not be executed.`);
console.info(`The workflow does not contain a "Start" node. So it can not be executed.`);
return Promise.resolve();
}

@@ -166,9 +175,10 @@ export class Execute extends Command {
}

if (data.data.resultData.error) {
this.log('Execution was NOT successfull:');
this.log('====================================');
this.log(JSON.stringify(data, null, 2));
console.info('Execution was NOT successful. See log message for details.');
logger.info('Execution error:');
logger.info('====================================');
logger.info(JSON.stringify(data, null, 2));

const { error } = data.data.resultData;
throw {

@@ -182,10 +192,11 @@ export class Execute extends Command {
}
this.log(JSON.stringify(data, null, 2));
} catch (e) {
console.error('\nGOT ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
console.error('Error executing workflow. See log messages for details.');
logger.error('\nExecution error:');
logger.info('====================================');
logger.error(e.message);
logger.error(e.stack);
this.exit(1);
}
@ -14,10 +14,17 @@ import {
|
|||
|
||||
import {
|
||||
Db,
|
||||
GenericHelpers,
|
||||
ICredentialsDecryptedDb,
|
||||
} from '../../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
|
@ -59,6 +66,9 @@ export class ExportCredentialsCommand extends Command {
|
|||
};
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
const { flags } = this.parse(ExportCredentialsCommand);
|
||||
|
||||
if (flags.backup) {
|
||||
|
@ -68,41 +78,42 @@ export class ExportCredentialsCommand extends Command {
|
|||
}
|
||||
|
||||
if (!flags.all && !flags.id) {
|
||||
GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`);
|
||||
console.info(`Either option "--all" or "--id" have to be set!`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.all && flags.id) {
|
||||
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
|
||||
console.info(`You should either use "--all" or "--id" but never both!`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.separate) {
|
||||
try {
|
||||
if (!flags.output) {
|
||||
GenericHelpers.logOutput(`You must inform an output directory via --output when using --separate`);
|
||||
console.info(`You must inform an output directory via --output when using --separate`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (fs.existsSync(flags.output)) {
|
||||
if (!fs.lstatSync(flags.output).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --output must be a directory`);
|
||||
console.info(`The paramenter --output must be a directory`);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
fs.mkdirSync(flags.output, { recursive: true });
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('\nFILESYSTEM ERROR');
|
||||
console.log('====================================');
|
||||
console.error(e.message);
|
||||
console.error(e.stack);
|
||||
console.error('Aborting execution as a filesystem error has been encountered while creating the output directory. See log messages for details.');
|
||||
logger.error('\nFILESYSTEM ERROR');
|
||||
logger.info('====================================');
|
||||
logger.error(e.message);
|
||||
logger.error(e.stack);
|
||||
this.exit(1);
|
||||
}
|
||||
} else if (flags.output) {
|
||||
if (fs.existsSync(flags.output)) {
|
||||
if (fs.lstatSync(flags.output).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --output must be a writeble file`);
|
||||
console.info(`The paramenter --output must be a writeble file`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -143,18 +154,21 @@ export class ExportCredentialsCommand extends Command {
|
|||
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + credentials[i].id + '.json';
|
||||
fs.writeFileSync(filename, fileContents);
|
||||
}
|
||||
console.log('Successfully exported', i, 'credentials.');
|
||||
console.info(`Successfully exported ${i} credentials.`);
|
||||
} else {
|
||||
const fileContents = JSON.stringify(credentials, null, flags.pretty ? 2 : undefined);
|
||||
if (flags.output) {
|
||||
fs.writeFileSync(flags.output!, fileContents);
|
||||
console.log('Successfully exported', credentials.length, 'credentials.');
|
||||
console.info(`Successfully exported ${credentials.length} credentials.`);
|
||||
} else {
|
||||
console.log(fileContents);
|
||||
console.info(fileContents);
|
||||
}
|
||||
}
|
||||
// Force exit as process won't exit using MySQL or Postgres.
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
this.error(error.message);
|
||||
console.error('Error exporting credentials. See log messages for details.');
|
||||
logger.error(error.message);
|
||||
this.exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,9 +9,16 @@ import {
|
|||
|
||||
import {
|
||||
Db,
|
||||
GenericHelpers,
|
||||
} from '../../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
|
@ -49,6 +56,9 @@ export class ExportWorkflowsCommand extends Command {
|
|||
};
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
const { flags } = this.parse(ExportWorkflowsCommand);
|
||||
|
||||
if (flags.backup) {
|
||||
|
@ -58,41 +68,42 @@ export class ExportWorkflowsCommand extends Command {
|
|||
}
|
||||
|
||||
if (!flags.all && !flags.id) {
|
||||
GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`);
|
||||
console.info(`Either option "--all" or "--id" have to be set!`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.all && flags.id) {
|
||||
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
|
||||
console.info(`You should either use "--all" or "--id" but never both!`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.separate) {
|
||||
try {
|
||||
if (!flags.output) {
|
||||
GenericHelpers.logOutput(`You must inform an output directory via --output when using --separate`);
|
||||
console.info(`You must inform an output directory via --output when using --separate`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (fs.existsSync(flags.output)) {
|
||||
if (!fs.lstatSync(flags.output).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --output must be a directory`);
|
||||
console.info(`The paramenter --output must be a directory`);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
fs.mkdirSync(flags.output, { recursive: true });
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('\nFILESYSTEM ERROR');
|
||||
console.log('====================================');
|
||||
console.error(e.message);
|
||||
console.error(e.stack);
|
||||
console.error('Aborting execution as a filesystem error has been encountered while creating the output directory. See log messages for details.');
|
||||
logger.error('\nFILESYSTEM ERROR');
|
||||
logger.info('====================================');
|
||||
logger.error(e.message);
|
||||
logger.error(e.stack);
|
||||
this.exit(1);
|
||||
}
|
||||
} else if (flags.output) {
|
||||
if (fs.existsSync(flags.output)) {
|
||||
if (fs.lstatSync(flags.output).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --output must be a writeble file`);
|
||||
console.info(`The paramenter --output must be a writeble file`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -119,18 +130,21 @@ export class ExportWorkflowsCommand extends Command {
|
|||
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + workflows[i].id + '.json';
|
||||
fs.writeFileSync(filename, fileContents);
|
||||
}
|
||||
console.log('Successfully exported', i, 'workflows.');
|
||||
console.info(`Successfully exported ${i} workflows.`);
|
||||
} else {
|
||||
const fileContents = JSON.stringify(workflows, null, flags.pretty ? 2 : undefined);
|
||||
if (flags.output) {
|
||||
fs.writeFileSync(flags.output!, fileContents);
|
||||
console.log('Successfully exported', workflows.length, workflows.length === 1 ? 'workflow.' : 'workflows.');
|
||||
console.info(`Successfully exported ${workflows.length} ${workflows.length === 1 ? 'workflow.' : 'workflows.'}`);
|
||||
} else {
|
||||
console.log(fileContents);
|
||||
console.info(fileContents);
|
||||
}
|
||||
}
|
||||
// Force exit as process won't exit using MySQL or Postgres.
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
this.error(error.message);
|
||||
console.error('Error exporting workflows. See log messages for details.');
|
||||
logger.error(error.message);
|
||||
this.exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,9 +10,16 @@ import {
|
|||
|
||||
import {
|
||||
Db,
|
||||
GenericHelpers,
|
||||
} from '../../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as glob from 'glob-promise';
|
||||
import * as path from 'path';
|
||||
|
@ -37,17 +44,20 @@ export class ImportCredentialsCommand extends Command {
|
|||
};
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
const { flags } = this.parse(ImportCredentialsCommand);
|
||||
|
||||
if (!flags.input) {
|
||||
GenericHelpers.logOutput(`An input file or directory with --input must be provided`);
|
||||
console.info(`An input file or directory with --input must be provided`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.separate) {
|
||||
if (fs.existsSync(flags.input)) {
|
||||
if (!fs.lstatSync(flags.input).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --input must be a directory`);
|
||||
console.info(`The paramenter --input must be a directory`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -89,9 +99,11 @@ export class ImportCredentialsCommand extends Command {
|
|||
await Db.collections.Credentials!.save(fileContents[i]);
|
||||
}
|
||||
}
|
||||
console.log('Successfully imported', i, 'credentials.');
|
||||
console.info(`Successfully imported ${i} ${i === 1 ? 'credential.' : 'credentials.'}`);
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
this.error(error.message);
|
||||
console.error('An error occurred while exporting credentials. See log messages for details.');
|
||||
logger.error(error.message);
|
||||
this.exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,9 +5,16 @@ import {
|
|||
|
||||
import {
|
||||
Db,
|
||||
GenericHelpers,
|
||||
} from '../../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as glob from 'glob-promise';
|
||||
import * as path from 'path';
|
||||
|
@ -32,17 +39,20 @@ export class ImportWorkflowsCommand extends Command {
|
|||
};
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
const { flags } = this.parse(ImportWorkflowsCommand);
|
||||
|
||||
if (!flags.input) {
|
||||
GenericHelpers.logOutput(`An input file or directory with --input must be provided`);
|
||||
console.info(`An input file or directory with --input must be provided`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.separate) {
|
||||
if (fs.existsSync(flags.input)) {
|
||||
if (!fs.lstatSync(flags.input).isDirectory()) {
|
||||
GenericHelpers.logOutput(`The paramenter --input must be a directory`);
|
||||
console.info(`The paramenter --input must be a directory`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -69,9 +79,11 @@ export class ImportWorkflowsCommand extends Command {
|
|||
}
|
||||
}
|
||||
|
||||
console.log('Successfully imported', i, i === 1 ? 'workflow.' : 'workflows.');
|
||||
console.info(`Successfully imported ${i} ${i === 1 ? 'workflow.' : 'workflows.'}`);
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
this.error(error.message);
|
||||
console.error('An error occurred while exporting workflows. See log messages for details.');
|
||||
logger.error(error.message);
|
||||
this.exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,11 +25,17 @@ import {
|
|||
} from '../src';
|
||||
import { IDataObject } from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
|
||||
let processExistCode = 0;
|
||||
|
||||
|
||||
export class Start extends Command {
|
||||
static description = 'Starts n8n. Makes Web-UI available and starts active workflows';
|
||||
|
||||
|
@ -71,7 +77,7 @@ export class Start extends Command {
|
|||
* get removed.
|
||||
*/
|
||||
static async stopProcess() {
|
||||
console.log(`\nStopping n8n...`);
|
||||
getLogger().info('\nStopping n8n...');
|
||||
|
||||
try {
|
||||
const externalHooks = ExternalHooks();
|
||||
|
@ -132,13 +138,18 @@ export class Start extends Command {
|
|||
// Wrap that the process does not close but we can still use async
|
||||
await (async () => {
|
||||
try {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
logger.info('Initializing n8n process');
|
||||
|
||||
// Start directly with the init of the database to improve startup time
|
||||
const startDbInitPromise = Db.init().catch((error: Error) => {
|
||||
console.error(`There was an error initializing DB: ${error.message}`);
|
||||
logger.error(`There was an error initializing DB: "${error.message}"`);
|
||||
|
||||
processExistCode = 1;
|
||||
// @ts-ignore
|
||||
process.emit('SIGINT');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Make sure the settings exist
|
||||
|
@ -184,7 +195,7 @@ export class Start extends Command {
|
|||
cumulativeTimeout += now - lastTimer;
|
||||
lastTimer = now;
|
||||
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
|
||||
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.');
|
||||
logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ". Exiting process.");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
@ -213,9 +224,9 @@ export class Start extends Command {
|
|||
|
||||
redis.on('error', (error) => {
|
||||
if (error.toString().includes('ECONNREFUSED') === true) {
|
||||
console.warn('Redis unavailable - trying to reconnect...');
|
||||
logger.warn('Redis unavailable - trying to reconnect...');
|
||||
} else {
|
||||
console.warn('Error with Redis: ', error);
|
||||
logger.warn('Error with Redis: ', error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -11,6 +11,13 @@ import {
|
|||
GenericHelpers,
|
||||
} from '../../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class UpdateWorkflowCommand extends Command {
|
||||
static description = '\Update workflows';
|
||||
|
@ -34,25 +41,28 @@ export class UpdateWorkflowCommand extends Command {
|
|||
};
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
const { flags } = this.parse(UpdateWorkflowCommand);
|
||||
|
||||
if (!flags.all && !flags.id) {
|
||||
GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`);
|
||||
console.info(`Either option "--all" or "--id" have to be set!`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (flags.all && flags.id) {
|
||||
GenericHelpers.logOutput(`Either something else on top should be "--all" or "--id" can be set never both!`);
|
||||
console.info(`Either something else on top should be "--all" or "--id" can be set never both!`);
|
||||
return;
|
||||
}
|
||||
|
||||
const updateQuery: IDataObject = {};
|
||||
if (flags.active === undefined) {
|
||||
GenericHelpers.logOutput(`No update flag like "--active=true" has been set!`);
|
||||
console.info(`No update flag like "--active=true" has been set!`);
|
||||
return;
|
||||
} else {
|
||||
if (!['false', 'true'].includes(flags.active)) {
|
||||
GenericHelpers.logOutput(`Valid values for flag "--active" are only "false" or "true"!`);
|
||||
console.info(`Valid values for flag "--active" are only "false" or "true"!`);
|
||||
return;
|
||||
}
|
||||
updateQuery.active = flags.active === 'true';
|
||||
|
@ -63,20 +73,21 @@ export class UpdateWorkflowCommand extends Command {
|
|||
|
||||
const findQuery: IDataObject = {};
|
||||
if (flags.id) {
|
||||
console.log(`Deactivating workflow with ID: ${flags.id}`);
|
||||
console.info(`Deactivating workflow with ID: ${flags.id}`);
|
||||
findQuery.id = flags.id;
|
||||
} else {
|
||||
console.log('Deactivating all workflows');
|
||||
console.info('Deactivating all workflows');
|
||||
findQuery.active = true;
|
||||
}
|
||||
|
||||
await Db.collections.Workflow!.update(findQuery, updateQuery);
|
||||
console.log('Done');
|
||||
console.info('Done');
|
||||
} catch (e) {
|
||||
console.error('\nGOT ERROR');
|
||||
console.log('====================================');
|
||||
console.error(e.message);
|
||||
console.error(e.stack);
|
||||
console.error('Error updating database. See log messages for details.');
|
||||
logger.error('\nGOT ERROR');
|
||||
logger.info('====================================');
|
||||
logger.error(e.message);
|
||||
logger.error(e.stack);
|
||||
this.exit(1);
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,13 @@ import {
|
|||
} from '../src';
|
||||
import { IDataObject } from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
|
||||
let processExistCode = 0;
|
||||
|
@ -42,7 +49,7 @@ export class Webhook extends Command {
|
|||
* get removed.
|
||||
*/
|
||||
static async stopProcess() {
|
||||
console.log(`\nStopping n8n...`);
|
||||
LoggerProxy.info(`\nStopping n8n...`);
|
||||
|
||||
try {
|
||||
const externalHooks = ExternalHooks();
|
||||
|
@ -72,7 +79,7 @@ export class Webhook extends Command {
|
|||
let count = 0;
|
||||
while (executingWorkflows.length !== 0) {
|
||||
if (count++ % 4 === 0) {
|
||||
console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
|
||||
LoggerProxy.info(`Waiting for ${executingWorkflows.length} active executions to finish...`);
|
||||
}
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 500);
|
||||
|
@ -81,7 +88,7 @@ export class Webhook extends Command {
|
|||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('There was an error shutting down n8n.', error);
|
||||
LoggerProxy.error('There was an error shutting down n8n.', error);
|
||||
}
|
||||
|
||||
process.exit(processExistCode);
|
||||
|
@ -89,6 +96,9 @@ export class Webhook extends Command {
|
|||
|
||||
|
||||
async run() {
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
// Make sure that n8n shuts down gracefully if possible
|
||||
process.on('SIGTERM', Webhook.stopProcess);
|
||||
process.on('SIGINT', Webhook.stopProcess);
|
||||
|
@ -116,11 +126,12 @@ export class Webhook extends Command {
|
|||
try {
|
||||
// Start directly with the init of the database to improve startup time
|
||||
const startDbInitPromise = Db.init().catch(error => {
|
||||
console.error(`There was an error initializing DB: ${error.message}`);
|
||||
logger.error(`There was an error initializing DB: "${error.message}"`);
|
||||
|
||||
processExistCode = 1;
|
||||
// @ts-ignore
|
||||
process.emit('SIGINT');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Make sure the settings exist
|
||||
|
@ -166,7 +177,7 @@ export class Webhook extends Command {
|
|||
cumulativeTimeout += now - lastTimer;
|
||||
lastTimer = now;
|
||||
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
|
||||
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.');
|
||||
logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ". Exiting process.");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
@ -195,9 +206,9 @@ export class Webhook extends Command {
|
|||
|
||||
redis.on('error', (error) => {
|
||||
if (error.toString().includes('ECONNREFUSED') === true) {
|
||||
console.warn('Redis unavailable - trying to reconnect...');
|
||||
logger.warn('Redis unavailable - trying to reconnect...');
|
||||
} else {
|
||||
console.warn('Error with Redis: ', error);
|
||||
logger.warn('Error with Redis: ', error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -209,14 +220,16 @@ export class Webhook extends Command {
|
|||
await activeWorkflowRunner.initWebhooks();
|
||||
|
||||
const editorUrl = GenericHelpers.getBaseUrl();
|
||||
this.log('Webhook listener waiting for requests.');
|
||||
console.info('Webhook listener waiting for requests.');
|
||||
|
||||
} catch (error) {
|
||||
this.error(`There was an error: ${error.message}`);
|
||||
console.error('Exiting due to error. See log message for details.');
|
||||
logger.error(`Webhook process cannot continue. "${error.message}"`);
|
||||
|
||||
processExistCode = 1;
|
||||
// @ts-ignore
|
||||
process.emit('SIGINT');
|
||||
process.exit(1);
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
|
|
@ -37,6 +37,14 @@ import {
|
|||
WorkflowExecuteAdditionalData,
|
||||
} from '../src';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../src/Logger';
|
||||
|
||||
import {
|
||||
LoggerProxy,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as config from '../config';
|
||||
import * as Bull from 'bull';
|
||||
import * as Queue from '../src/Queue';
|
||||
|
@ -71,7 +79,7 @@ export class Worker extends Command {
|
|||
* get removed.
|
||||
*/
|
||||
static async stopProcess() {
|
||||
console.log(`\nStopping n8n...`);
|
||||
LoggerProxy.info(`Stopping n8n...`);
|
||||
|
||||
// Stop accepting new jobs
|
||||
Worker.jobQueue.pause(true);
|
||||
|
@ -95,7 +103,7 @@ export class Worker extends Command {
|
|||
while (Object.keys(Worker.runningJobs).length !== 0) {
|
||||
if (count++ % 4 === 0) {
|
||||
const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000);
|
||||
console.log(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`);
|
||||
LoggerProxy.info(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`);
|
||||
}
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 500);
|
||||
|
@ -103,7 +111,7 @@ export class Worker extends Command {
|
|||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('There was an error shutting down n8n.', error);
|
||||
LoggerProxy.error('There was an error shutting down n8n.', error);
|
||||
}
|
||||
|
||||
process.exit(Worker.processExistCode);
|
||||
|
@ -113,7 +121,7 @@ export class Worker extends Command {
|
|||
const jobData = job.data as IBullJobData;
|
||||
const executionDb = await Db.collections.Execution!.findOne(jobData.executionId) as IExecutionFlattedDb;
|
||||
const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
|
||||
console.log(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`);
|
||||
LoggerProxy.info(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`);
|
||||
|
||||
let staticData = currentExecutionDb.workflowData!.staticData;
|
||||
if (jobData.loadStaticData === true) {
|
||||
|
@ -170,7 +178,10 @@ export class Worker extends Command {
|
|||
}
|
||||
|
||||
async run() {
|
||||
console.log('Starting n8n worker...');
|
||||
const logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
console.info('Starting n8n worker...');
|
||||
|
||||
// Make sure that n8n shuts down gracefully if possible
|
||||
process.on('SIGTERM', Worker.stopProcess);
|
||||
|
@ -183,11 +194,12 @@ export class Worker extends Command {
|
|||
|
||||
// Start directly with the init of the database to improve startup time
|
||||
const startDbInitPromise = Db.init().catch(error => {
|
||||
console.error(`There was an error initializing DB: ${error.message}`);
|
||||
logger.error(`There was an error initializing DB: "${error.message}"`);
|
||||
|
||||
Worker.processExistCode = 1;
|
||||
// @ts-ignore
|
||||
process.emit('SIGINT');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Make sure the settings exist
|
||||
|
@ -221,10 +233,10 @@ export class Worker extends Command {
|
|||
|
||||
const versions = await GenericHelpers.getVersions();
|
||||
|
||||
console.log('\nn8n worker is now ready');
|
||||
console.log(` * Version: ${versions.cli}`);
|
||||
console.log(` * Concurrency: ${flags.concurrency}`);
|
||||
console.log('');
|
||||
console.info('\nn8n worker is now ready');
|
||||
console.info(` * Version: ${versions.cli}`);
|
||||
console.info(` * Concurrency: ${flags.concurrency}`);
|
||||
console.info('');
|
||||
|
||||
Worker.jobQueue.on('global:progress', (jobId, progress) => {
|
||||
// Progress of a job got updated which does get used
|
||||
|
@ -252,27 +264,28 @@ export class Worker extends Command {
|
|||
cumulativeTimeout += now - lastTimer;
|
||||
lastTimer = now;
|
||||
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
|
||||
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.');
|
||||
logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ". Exiting process.");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
console.warn('Redis unavailable - trying to reconnect...');
|
||||
logger.warn('Redis unavailable - trying to reconnect...');
|
||||
} else if (error.toString().includes('Error initializing Lua scripts') === true) {
|
||||
// This is a non-recoverable error
|
||||
// Happens when worker starts and Redis is unavailable
|
||||
// Even if Redis comes back online, worker will be zombie
|
||||
console.error('Error initializing worker.');
|
||||
logger.error('Error initializing worker.');
|
||||
process.exit(2);
|
||||
} else {
|
||||
console.error('Error from queue: ', error);
|
||||
logger.error('Error from queue: ', error);
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
this.error(`There was an error: ${error.message}`);
|
||||
logger.error(`Worker process cannot continue. "${error.message}"`);
|
||||
|
||||
Worker.processExistCode = 1;
|
||||
// @ts-ignore
|
||||
process.emit('SIGINT');
|
||||
process.exit(1);
|
||||
}
|
||||
})();
|
||||
|
||||
|
|
|
@@ -1,5 +1,7 @@
import * as convict from 'convict';
import * as dotenv from 'dotenv';
import * as path from 'path';
import * as core from 'n8n-core';

dotenv.config();

@@ -572,6 +574,41 @@ const config = convict({
		},
	},

	logs: {
		level: {
			doc: 'Log output level. Options are error, warn, info, verbose and debug.',
			format: String,
			default: 'info',
			env: 'N8N_LOG_LEVEL',
		},
		output: {
			doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
			format: String,
			default: 'console',
			env: 'N8N_LOG_OUTPUT',
		},
		file: {
			fileCountMax: {
				doc: 'Maximum number of files to keep.',
				format: Number,
				default: 100,
				env: 'N8N_LOG_FILE_COUNT_MAX',
			},
			fileSizeMax: {
				doc: 'Maximum size for each log file in MB.',
				format: Number,
				default: 16,
				env: 'N8N_LOG_FILE_SIZE_MAX',
			},
			location: {
				doc: 'Log file location; only used if log output is set to file.',
				format: String,
				default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
				env: 'N8N_LOG_FILE_LOCATION',
			},
		},
	},

});

// Overwrite default configuration with settings which got defined in
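A minimal sketch of how these new options are read; the Logger added later in this diff uses exactly these config keys, and the values can also be supplied through the listed environment variables (e.g. N8N_LOG_LEVEL=debug, N8N_LOG_OUTPUT=console,file). The log message below is illustrative only:

```typescript
import * as config from '../config';

// 'info' unless N8N_LOG_LEVEL overrides it
const level = config.get('logs.level') as string;

// 'console' by default; may be a comma-separated list such as 'console,file'
const outputs = (config.get('logs.output') as string).split(',').map(output => output.trim());

if (outputs.includes('file')) {
	// Only relevant when file output is enabled (see N8N_LOG_FILE_LOCATION)
	console.log(`Writing logs to ${config.get('logs.file.location')}`);
}
```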
@@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "0.117.0",
"version": "0.118.0",
"description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",

@@ -44,7 +44,7 @@
"workflow"
],
"engines": {
"node": ">=12.0.0"
"node": ">=14.0.0"
},
"files": [
"bin",

@@ -64,7 +64,7 @@
"@types/jest": "^26.0.13",
"@types/localtunnel": "^1.9.0",
"@types/lodash.get": "^4.4.6",
"@types/node": "14.0.27",
"@types/node": "^14.14.40",
"@types/open": "^6.1.0",
"@types/parseurl": "^1.3.1",
"@types/request-promise-native": "~1.0.15",

@@ -88,6 +88,7 @@
"body-parser": "^1.18.3",
"body-parser-xml": "^1.1.0",
"bull": "^3.19.0",
"callsites": "^3.1.0",
"client-oauth2": "^4.2.5",
"compression": "^1.7.4",
"connect-history-api-fallback": "^1.6.0",

@@ -105,10 +106,10 @@
"localtunnel": "^2.0.0",
"lodash.get": "^4.4.2",
"mysql2": "~2.2.0",
"n8n-core": "~0.68.0",
"n8n-editor-ui": "~0.87.0",
"n8n-nodes-base": "~0.114.0",
"n8n-workflow": "~0.56.0",
"n8n-core": "~0.69.0",
"n8n-editor-ui": "~0.88.0",
"n8n-nodes-base": "~0.115.0",
"n8n-workflow": "~0.57.0",
"oauth-1.0a": "^2.2.6",
"open": "^7.0.0",
"pg": "^8.3.0",

@@ -116,7 +117,7 @@
"request-promise-native": "^1.0.7",
"sqlite3": "^5.0.1",
"sse-channel": "^3.1.1",
"tslib": "1.13.0",
"tslib": "1.14.1",
"typeorm": "^0.2.30"
},
"jest": {
@@ -35,6 +35,9 @@ import {
} from 'n8n-workflow';

import * as express from 'express';
import {
LoggerProxy as Logger,
} from 'n8n-workflow';

export class ActiveWorkflowRunner {
private activeWorkflows: ActiveWorkflows | null = null;

@@ -43,7 +46,6 @@ export class ActiveWorkflowRunner {
[key: string]: IActivationError;
} = {};

async init() {

// Get the active workflows from database

@@ -59,20 +61,24 @@ export class ActiveWorkflowRunner {
this.activeWorkflows = new ActiveWorkflows();

if (workflowsData.length !== 0) {
console.log('\n ================================');
console.log(' Start Active Workflows:');
console.log(' ================================');
console.info(' ================================');
console.info(' Start Active Workflows:');
console.info(' ================================');

for (const workflowData of workflowsData) {
console.log(` - ${workflowData.name}`);
Logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {workflowName: workflowData.name, workflowId: workflowData.id});
try {
await this.add(workflowData.id.toString(), 'init', workflowData);
Logger.verbose(`Successfully started workflow "${workflowData.name}"`, {workflowName: workflowData.name, workflowId: workflowData.id});
console.log(` => Started`);
} catch (error) {
console.log(` => ERROR: Workflow could not be activated:`);
console.log(` ${error.message}`);
Logger.error(`Unable to initialize workflow "${workflowData.name}" (startup)`, {workflowName: workflowData.name, workflowId: workflowData.id});
}
}
Logger.verbose('Finished initializing active workflows (startup)');
}
}

@@ -88,6 +94,7 @@ export class ActiveWorkflowRunner {
*/
async removeAll(): Promise<void> {
const activeWorkflowId: string[] = [];
Logger.verbose('Call to remove all active workflows received (removeAll)');

if (this.activeWorkflows !== null) {
// TODO: This should be renamed!

@@ -117,6 +124,7 @@ export class ActiveWorkflowRunner {
* @memberof ActiveWorkflowRunner
*/
async executeWebhook(httpMethod: WebhookHttpMethod, path: string, req: express.Request, res: express.Response): Promise<IResponseCallbackData> {
Logger.debug(`Received webhoook "${httpMethod}" for path "${path}"`);
if (this.activeWorkflows === null) {
throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404);
}

@@ -437,6 +445,7 @@ export class ActiveWorkflowRunner {
return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.__emit = (data: INodeExecutionData[][]): void => {
Logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
this.runWorkflow(workflowData, node, data, additionalData, mode);
};
return returnFunctions;

@@ -458,6 +467,7 @@ export class ActiveWorkflowRunner {
return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.emit = (data: INodeExecutionData[][]): void => {
Logger.debug(`Received trigger for workflow "${workflow.name}"`);
WorkflowHelpers.saveStaticData(workflow);
this.runWorkflow(workflowData, node, data, additionalData, mode).catch((err) => console.error(err));
};

@@ -492,6 +502,7 @@ export class ActiveWorkflowRunner {

const canBeActivated = workflowInstance.checkIfWorkflowCanBeActivated(['n8n-nodes-base.start']);
if (canBeActivated === false) {
Logger.error(`Unable to activate workflow "${workflowData.name}"`);
throw new Error(`The workflow can not be activated because it does not contain any nodes which could start the workflow. Only workflows which have trigger or webhook nodes can be activated.`);
}

@@ -507,6 +518,7 @@ export class ActiveWorkflowRunner {
if (workflowInstance.getTriggerNodes().length !== 0
|| workflowInstance.getPollNodes().length !== 0) {
await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, mode, activation, getTriggerFunctions, getPollFunctions);
Logger.info(`Successfully activated workflow "${workflowData.name}"`);
}

if (this.activationErrors[workflowId] !== undefined) {
@@ -3,14 +3,11 @@ import * as express from 'express';
import { join as pathJoin } from 'path';
import {
readFile as fsReadFile,
} from 'fs';
import { promisify } from 'util';
} from 'fs/promises';
import { IDataObject } from 'n8n-workflow';

import { IPackageVersions } from './';

const fsReadFileAsync = promisify(fsReadFile);

let versionCache: IPackageVersions | undefined;

@@ -72,7 +69,7 @@ export async function getVersions(): Promise<IPackageVersions> {
return versionCache;
}

const packageFile = await fsReadFileAsync(pathJoin(__dirname, '../../package.json'), 'utf8') as string;
const packageFile = await fsReadFile(pathJoin(__dirname, '../../package.json'), 'utf8') as string;
const packageData = JSON.parse(packageFile);

versionCache = {

@@ -122,7 +119,7 @@ export async function getConfigValue(configKey: string): Promise<string | boolea

let data;
try {
data = await fsReadFileAsync(fileEnvironmentVariable, 'utf8') as string;
data = await fsReadFile(fileEnvironmentVariable, 'utf8') as string;
} catch (error) {
if (error.code === 'ENOENT') {
throw new Error(`The file "${fileEnvironmentVariable}" could not be found.`);
@@ -14,15 +14,9 @@ import {
readdir as fsReaddir,
readFile as fsReadFile,
stat as fsStat,
} from 'fs';
} from 'fs/promises';
import * as glob from 'glob-promise';
import * as path from 'path';
import { promisify } from 'util';

const fsAccessAsync = promisify(fsAccess);
const fsReaddirAsync = promisify(fsReaddir);
const fsReadFileAsync = promisify(fsReadFile);
const fsStatAsync = promisify(fsStat);

class LoadNodesAndCredentialsClass {

@@ -49,7 +43,7 @@ class LoadNodesAndCredentialsClass {
];
for (const checkPath of checkPaths) {
try {
await fsAccessAsync(checkPath);
await fsAccess(checkPath);
// Folder exists, so use it.
this.nodeModulesPath = path.dirname(checkPath);
break;

@@ -102,13 +96,13 @@ class LoadNodesAndCredentialsClass {
const getN8nNodePackagesRecursive = async (relativePath: string): Promise<string[]> => {
const results: string[] = [];
const nodeModulesPath = `${this.nodeModulesPath}/${relativePath}`;
for (const file of await fsReaddirAsync(nodeModulesPath)) {
for (const file of await fsReaddir(nodeModulesPath)) {
const isN8nNodesPackage = file.indexOf('n8n-nodes-') === 0;
const isNpmScopedPackage = file.indexOf('@') === 0;
if (!isN8nNodesPackage && !isNpmScopedPackage) {
continue;
}
if (!(await fsStatAsync(nodeModulesPath)).isDirectory()) {
if (!(await fsStat(nodeModulesPath)).isDirectory()) {
continue;
}
if (isN8nNodesPackage) { results.push(`${relativePath}${file}`); }

@@ -234,7 +228,7 @@ class LoadNodesAndCredentialsClass {
const packagePath = path.join(this.nodeModulesPath, packageName);

// Read the data from the package.json file to see if any n8n data is defiend
const packageFileString = await fsReadFileAsync(path.join(packagePath, 'package.json'), 'utf8');
const packageFileString = await fsReadFile(path.join(packagePath, 'package.json'), 'utf8');
const packageFile = JSON.parse(packageFileString);
if (!packageFile.hasOwnProperty('n8n')) {
return;
packages/cli/src/Logger.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import config = require('../config');
import * as winston from 'winston';

import {
	IDataObject,
	ILogger,
	LogTypes,
} from 'n8n-workflow';

import * as callsites from 'callsites';
import { basename } from 'path';

class Logger implements ILogger {
	private logger: winston.Logger;

	constructor() {
		const level = config.get('logs.level');
		const output = (config.get('logs.output') as string).split(',').map(output => output.trim());

		this.logger = winston.createLogger({
			level,
		});

		if (output.includes('console')) {
			let format: winston.Logform.Format;
			if (['debug', 'verbose'].includes(level)) {
				format = winston.format.combine(
					winston.format.metadata(),
					winston.format.timestamp(),
					winston.format.colorize({ all: true }),
					winston.format.printf(({ level, message, timestamp, metadata }) => {
						return `${timestamp} | ${level.padEnd(18)} | ${message}` + (Object.keys(metadata).length ? ` ${JSON.stringify(metadata)}` : '');
					}) as winston.Logform.Format
				);
			} else {
				format = winston.format.printf(({ message }) => message) as winston.Logform.Format;
			}

			this.logger.add(
				new winston.transports.Console({
					format,
				})
			);
		}

		if (output.includes('file')) {
			const fileLogFormat = winston.format.combine(
				winston.format.timestamp(),
				winston.format.metadata(),
				winston.format.json()
			);
			this.logger.add(
				new winston.transports.File({
					filename: config.get('logs.file.location'),
					format: fileLogFormat,
					maxsize: config.get('logs.file.fileSizeMax') as number * 1048576, // config * 1mb
					maxFiles: config.get('logs.file.fileCountMax'),
				})
			);
		}
	}

	log(type: LogTypes, message: string, meta: object = {}) {
		const callsite = callsites();
		// We are using the third array element as the structure is as follows:
		// [0]: this file
		// [1]: Should be LoggerProxy
		// [2]: Should point to the caller.
		// Note: getting line number is useless because at this point
		// We are in runtime, so it means we are looking at compiled js files
		const logDetails = {} as IDataObject;
		if (callsite[2] !== undefined) {
			logDetails.file = basename(callsite[2].getFileName() || '');
			const functionName = callsite[2].getFunctionName();
			if (functionName) {
				logDetails.function = functionName;
			}
		}
		this.logger.log(type, message, {...meta, ...logDetails});
	}

	// Convenience methods below

	debug(message: string, meta: object = {}) {
		this.log('debug', message, meta);
	}

	info(message: string, meta: object = {}) {
		this.log('info', message, meta);
	}

	error(message: string, meta: object = {}) {
		this.log('error', message, meta);
	}

	verbose(message: string, meta: object = {}) {
		this.log('verbose', message, meta);
	}

	warn(message: string, meta: object = {}) {
		this.log('warn', message, meta);
	}

}

let activeLoggerInstance: Logger | undefined;

export function getLogger() {
	if (activeLoggerInstance === undefined) {
		activeLoggerInstance = new Logger();
	}

	return activeLoggerInstance;
}
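The CLI commands in this diff wire this logger up the same way at the start of each `run()` method; a condensed usage sketch (the trigger message and workflow ID below are placeholders):

```typescript
import { getLogger } from '../src/Logger';
import { LoggerProxy } from 'n8n-workflow';

const logger = getLogger();   // lazily-created singleton backed by winston
LoggerProxy.init(logger);     // make it reachable from code that only depends on n8n-workflow

// Direct use inside the CLI package:
logger.info('Initializing n8n process');

// Use via the proxy from shared code, with structured metadata:
LoggerProxy.debug('Received trigger for workflow "Example"', { workflowId: '1' });
```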
@@ -7,6 +7,10 @@ import {
IPushDataType,
} from '.';

import {
LoggerProxy as Logger,
} from 'n8n-workflow';

export class Push {
private channel: sseChannel;
private connections: {

@@ -24,6 +28,7 @@ export class Push {

this.channel.on('disconnect', (channel: string, res: express.Response) => {
if (res.req !== undefined) {
Logger.debug(`Remove editor-UI session`, { sessionId: res.req.query.sessionId });
delete this.connections[res.req.query.sessionId as string];
}
});

@@ -39,6 +44,8 @@ export class Push {
* @memberof Push
*/
add(sessionId: string, req: express.Request, res: express.Response) {
Logger.debug(`Add editor-UI session`, { sessionId });

if (this.connections[sessionId] !== undefined) {
// Make sure to remove existing connection with the same session
// id if one exists already

@@ -64,11 +71,12 @@ export class Push {

send(type: IPushDataType, data: any, sessionId?: string) { // tslint:disable-line:no-any
if (sessionId !== undefined && this.connections[sessionId] === undefined) {
// TODO: Log that properly!
console.error(`The session "${sessionId}" is not registred.`);
Logger.error(`The session "${sessionId}" is not registred.`, { sessionId });
return;
}

Logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId });

const sendData: IPushData = {
type,
data,
@@ -30,13 +30,14 @@ import {
IWebhookData,
IWebhookResponseData,
IWorkflowExecuteAdditionalData,
LoggerProxy as Logger,
NodeHelpers,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';

const activeExecutions = ActiveExecutions.getInstance();

const activeExecutions = ActiveExecutions.getInstance();

/**
* Returns all the webhooks which should be created for the give workflow

@@ -286,6 +287,8 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
const workflowRunner = new WorkflowRunner();
const executionId = await workflowRunner.run(runData, true, !didSendResponse);

Logger.verbose(`Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`, { executionId });

// Get a promise which resolves when the workflow did execute and send then response
const executePromise = activeExecutions.getPostExecutePromise(executionId) as Promise<IExecutionDb | undefined>;
executePromise.then((data) => {
@ -37,6 +37,7 @@ import {
|
|||
IWorkflowExecuteAdditionalData,
|
||||
IWorkflowExecuteHooks,
|
||||
IWorkflowHooksOptionalParameters,
|
||||
LoggerProxy as Logger,
|
||||
Workflow,
|
||||
WorkflowExecuteMode,
|
||||
WorkflowHooks,
|
||||
|
@ -44,11 +45,10 @@ import {
|
|||
|
||||
import * as config from '../config';
|
||||
|
||||
import { LessThanOrEqual } from "typeorm";
|
||||
import { LessThanOrEqual } from 'typeorm';
|
||||
|
||||
const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
|
||||
|
||||
|
||||
/**
|
||||
* Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects
|
||||
* all the data and executes it
|
||||
|
@ -85,9 +85,11 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo
|
|||
// Run the error workflow
|
||||
// To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow.
|
||||
if (workflowData.settings !== undefined && workflowData.settings.errorWorkflow && !(mode === 'error' && workflowData.id && workflowData.settings.errorWorkflow.toString() === workflowData.id.toString())) {
|
||||
Logger.verbose(`Start external error workflow`, { executionId: this.executionId, errorWorkflowId: workflowData.settings.errorWorkflow.toString(), workflowId: this.workflowData.id });
|
||||
// If a specific error workflow is set run only that one
|
||||
WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData);
|
||||
} else if (mode !== 'error' && workflowData.id !== undefined && workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)) {
|
||||
Logger.verbose(`Start internal error workflow`, { executionId: this.executionId, workflowId: this.workflowData.id });
|
||||
// If the workflow contains
|
||||
WorkflowHelpers.executeErrorWorkflow(workflowData.id.toString(), workflowErrorData);
|
||||
}
|
||||
|
@ -102,6 +104,8 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo
|
|||
let throttling = false;
|
||||
function pruneExecutionData(): void {
|
||||
if (!throttling) {
|
||||
Logger.verbose('Pruning execution data from database');
|
||||
|
||||
throttling = true;
|
||||
const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds
|
||||
const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h
|
||||
|
@ -133,6 +137,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
if (this.sessionId === undefined) {
|
||||
return;
|
||||
}
|
||||
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
|
||||
const pushInstance = Push.getInstance();
|
||||
pushInstance.send('nodeExecuteBefore', {
|
||||
|
@ -147,6 +152,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
if (this.sessionId === undefined) {
|
||||
return;
|
||||
}
|
||||
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
|
||||
const pushInstance = Push.getInstance();
|
||||
pushInstance.send('nodeExecuteAfter', {
|
||||
|
@ -158,6 +164,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
],
|
||||
workflowExecuteBefore: [
|
||||
async function (this: WorkflowHooks): Promise<void> {
|
||||
Logger.debug(`Executing hook (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
// Push data to session which started the workflow
|
||||
if (this.sessionId === undefined) {
|
||||
return;
|
||||
|
@ -168,13 +175,14 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
mode: this.mode,
|
||||
startedAt: new Date(),
|
||||
retryOf: this.retryOf,
|
||||
workflowId: this.workflowData.id as string,
|
||||
workflowId: this.workflowData.id, sessionId: this.sessionId as string,
|
||||
workflowName: this.workflowData.name,
|
||||
}, this.sessionId);
|
||||
},
|
||||
],
|
||||
workflowExecuteAfter: [
|
||||
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
|
||||
Logger.debug(`Executing hook (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
// Push data to session which started the workflow
|
||||
if (this.sessionId === undefined) {
|
||||
return;
|
||||
|
@ -195,6 +203,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
};
|
||||
|
||||
// Push data to editor-ui once workflow finished
|
||||
Logger.debug(`Save execution progress to database for execution ID ${this.executionId} `, { executionId: this.executionId, workflowId: this.workflowData.id });
|
||||
// TODO: Look at this again
|
||||
const sendData: IPushDataExecutionFinished = {
|
||||
executionId: this.executionId,
|
||||
|
@ -232,6 +241,8 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
|||
}
|
||||
|
||||
try {
|
||||
Logger.debug(`Save execution progress to database for execution ID ${this.executionId} `, { executionId: this.executionId, nodeName });
|
||||
|
||||
const execution = await Db.collections.Execution!.findOne(this.executionId);
|
||||
|
||||
if (execution === undefined) {
|
||||
|
@ -286,7 +297,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
|||
// For busy machines, we may get "Database is locked" errors.
|
||||
|
||||
// We do this to prevent crashes and executions ending in `unknown` state.
|
||||
console.log(`Failed saving execution progress to database for execution ID ${this.executionId}`, err);
|
||||
Logger.error(`Failed saving execution progress to database for execution ID ${this.executionId} (hookFunctionsPreExecute, nodeExecuteAfter)`, { ...err, executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
}
|
||||
|
||||
},
|
||||
|
@ -307,6 +318,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
workflowExecuteBefore: [],
|
||||
workflowExecuteAfter: [
|
||||
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
|
||||
Logger.debug(`Executing hook (hookFunctionsSave)`, { executionId: this.executionId, workflowId: this.workflowData.id });
|
||||
|
||||
// Prune old execution data
|
||||
if (config.get('executions.pruneData')) {
|
||||
|
@ -321,8 +333,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
try {
|
||||
await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData);
|
||||
} catch (e) {
|
||||
// TODO: Add proper logging!
|
||||
console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`);
|
||||
Logger.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`, { executionId: this.executionId, workflowId: this.workflowData.id });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -375,6 +386,9 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
fullExecutionData.workflowId = this.workflowData.id.toString();
|
||||
}
|
||||
|
||||
// Leave log message before flatten as that operation increases memory usage a lot and the chance of a crash is highest here
Logger.debug(`Save execution data to database for execution ID ${this.executionId}`, { executionId: this.executionId, workflowId: this.workflowData.id });

const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
|
||||
|
||||
// Save the Execution in DB
|
||||
|
@ -420,8 +434,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
|
|||
try {
|
||||
await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData);
|
||||
} catch (e) {
|
||||
// TODO: Add proper logging!
|
||||
console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`);
|
||||
Logger.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`, { sessionId: this.sessionId, workflowId: this.workflowData.id });
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -18,8 +18,8 @@ import {
|
|||
IRunExecutionData,
|
||||
ITaskData,
|
||||
IWorkflowCredentials,
|
||||
Workflow,
|
||||
} from 'n8n-workflow';
|
||||
LoggerProxy as Logger,
|
||||
Workflow,} from 'n8n-workflow';
|
||||
|
||||
import * as config from '../config';
|
||||
|
||||
|
@ -86,7 +86,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData
|
|||
|
||||
if (workflowData === undefined) {
|
||||
// The error workflow could not be found
|
||||
console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`);
|
||||
Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`, { workflowId });
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -105,7 +105,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData
|
|||
}
|
||||
|
||||
if (workflowStartNode === undefined) {
|
||||
console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`);
|
||||
Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -153,7 +153,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData
|
|||
const workflowRunner = new WorkflowRunner();
|
||||
await workflowRunner.run(runData);
|
||||
} catch (error) {
|
||||
console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}": ${error.message}`);
|
||||
Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`, { workflowId: workflowErrorData.workflow.id });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -315,8 +315,7 @@ export async function saveStaticData(workflow: Workflow): Promise <void> {
|
|||
await saveStaticDataById(workflow.id!, workflow.staticData);
|
||||
workflow.staticData.__dataChanged = false;
|
||||
} catch (e) {
|
||||
// TODO: Add proper logging!
|
||||
console.error(`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: ${e.message}`);
|
||||
Logger.error(`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: "${e.message}"`, { workflowId: workflow.id });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,6 +29,7 @@ import {
|
|||
import {
|
||||
ExecutionError,
|
||||
IRun,
|
||||
LoggerProxy as Logger,
|
||||
Workflow,
|
||||
WorkflowExecuteMode,
|
||||
WorkflowHooks,
|
||||
|
@ -177,20 +178,24 @@ export class WorkflowRunner {
|
|||
|
||||
// Register the active execution
|
||||
const executionId = await this.activeExecutions.add(data, undefined);
|
||||
Logger.verbose(`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, {executionId});
|
||||
|
||||
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true);
|
||||
|
||||
let workflowExecution: PCancelable<IRun>;
|
||||
if (data.executionData !== undefined) {
|
||||
Logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {executionId});
|
||||
const workflowExecute = new WorkflowExecute(additionalData, data.executionMode, data.executionData);
|
||||
workflowExecution = workflowExecute.processRunExecutionData(workflow);
|
||||
} else if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) {
|
||||
Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {executionId});
|
||||
// Execute all nodes
|
||||
|
||||
// Can execute without webhook so go on
|
||||
const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
|
||||
workflowExecution = workflowExecute.run(workflow, undefined, data.destinationNode);
|
||||
} else {
|
||||
Logger.debug(`Execution ID ${executionId} is a partial execution.`, {executionId});
|
||||
// Execute only the nodes between start and destination nodes
|
||||
const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
|
||||
workflowExecution = workflowExecute.runPartialWorkflow(workflow, data.runData, data.startNodes, data.destinationNode);
|
||||
|
@ -450,6 +455,7 @@ export class WorkflowRunner {
|
|||
|
||||
// Listen to data from the subprocess
|
||||
subprocess.on('message', async (message: IProcessMessage) => {
Logger.debug(`Received child process message of type ${message.type} for execution ID ${executionId}.`, {executionId});
if (message.type === 'start') {
// Now that the execution actually started, set the timeout again so that it does not time out too early.
startedAt = new Date();
|
||||
|
@ -491,11 +497,13 @@ export class WorkflowRunner {
|
|||
// Also get informed when the processes does exit especially when it did crash or timed out
|
||||
subprocess.on('exit', async (code, signal) => {
|
||||
if (signal === 'SIGTERM'){
|
||||
Logger.debug(`Subprocess for execution ID ${executionId} timed out.`, {executionId});
|
||||
// Execution timed out and its process has been terminated
|
||||
const timeoutError = new WorkflowOperationError('Workflow execution timed out!');
|
||||
|
||||
this.processError(timeoutError, startedAt, data.executionMode, executionId);
|
||||
} else if (code !== 0) {
|
||||
Logger.debug(`Subprocess for execution ID ${executionId} finished with error code ${code}.`, {executionId});
|
||||
// Process did exit with error code, so something went wrong.
|
||||
const executionError = new WorkflowOperationError('Workflow execution process did crash for an unknown reason!');
|
||||
|
||||
|
|
|
@ -20,23 +20,29 @@ import {
|
|||
ExecutionError,
|
||||
IDataObject,
|
||||
IExecuteWorkflowInfo,
|
||||
ILogger,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeData,
|
||||
IRun,
|
||||
IRunExecutionData,
|
||||
ITaskData,
|
||||
IWorkflowExecuteAdditionalData,
|
||||
IWorkflowExecuteHooks,
|
||||
LoggerProxy,
|
||||
Workflow,
|
||||
WorkflowHooks,
|
||||
WorkflowOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
getLogger,
|
||||
} from '../src/Logger';
|
||||
|
||||
import * as config from '../config';
|
||||
|
||||
export class WorkflowRunnerProcess {
|
||||
data: IWorkflowExecutionDataProcessWithExecution | undefined;
|
||||
logger: ILogger;
|
||||
startedAt = new Date();
|
||||
workflow: Workflow | undefined;
|
||||
workflowExecute: WorkflowExecute | undefined;
|
||||
|
@ -57,7 +63,13 @@ export class WorkflowRunnerProcess {
|
|||
process.on('SIGTERM', WorkflowRunnerProcess.stopProcess);
|
||||
process.on('SIGINT', WorkflowRunnerProcess.stopProcess);
|
||||
|
||||
const logger = this.logger = getLogger();
|
||||
LoggerProxy.init(logger);
|
||||
|
||||
this.data = inputData;
|
||||
|
||||
logger.verbose('Initializing n8n sub-process', { pid: process.pid, workflowId: this.data.workflowData.id });
|
||||
|
||||
let className: string;
|
||||
let tempNode: INodeType;
|
||||
let filePath: string;
|
||||
|
@ -165,6 +177,8 @@ export class WorkflowRunnerProcess {
|
|||
throw e;
|
||||
}
|
||||
|
||||
await sendToParentProcess('finishExecution', { executionId, result });
|
||||
|
||||
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(result);
|
||||
return returnData!.data!.main;
|
||||
};
|
||||
|
@ -200,12 +214,7 @@ export class WorkflowRunnerProcess {
|
|||
parameters,
|
||||
});
|
||||
} catch (error) {
|
||||
// TODO: Add proper logging
|
||||
console.error(`There was a problem sending hook: "${hook}"`);
|
||||
console.error('Parameters:');
|
||||
console.error(parameters);
|
||||
console.error('Error:');
|
||||
console.error(error);
|
||||
this.logger.error(`There was a problem sending hook: "${hook}"`, { parameters, error});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "n8n-core",
|
||||
"version": "0.68.0",
|
||||
"version": "0.69.0",
|
||||
"description": "Core functionality of n8n",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
@ -47,7 +47,7 @@
|
|||
"file-type": "^14.6.2",
|
||||
"lodash.get": "^4.4.2",
|
||||
"mime-types": "^2.1.27",
|
||||
"n8n-workflow": "~0.56.0",
|
||||
"n8n-workflow": "~0.57.0",
|
||||
"oauth-1.0a": "^2.2.6",
|
||||
"p-cancelable": "^2.0.0",
|
||||
"request": "^2.88.2",
|
||||
|
|
|
@ -7,6 +7,7 @@ import {
|
|||
IPollResponse,
|
||||
ITriggerResponse,
|
||||
IWorkflowExecuteAdditionalData,
|
||||
LoggerProxy as Logger,
|
||||
Workflow,
|
||||
WorkflowActivateMode,
|
||||
WorkflowExecuteMode,
|
||||
|
@ -17,6 +18,7 @@ import {
|
|||
IWorkflowData,
|
||||
} from './';
|
||||
|
||||
|
||||
export class ActiveWorkflows {
|
||||
private workflowData: {
|
||||
[key: string]: IWorkflowData;
|
||||
|
@ -163,6 +165,7 @@ export class ActiveWorkflows {
|
|||
|
||||
// The trigger function to execute when the cron-time got reached
|
||||
const executeTrigger = async () => {
|
||||
Logger.info(`Polling trigger initiated for workflow "${workflow.name}"`, {workflowName: workflow.name, workflowId: workflow.id});
|
||||
const pollResponse = await workflow.runPoll(node, pollFunctions);
|
||||
|
||||
if (pollResponse !== null) {
|
||||
|
|
|
@ -51,6 +51,9 @@ import * as requestPromise from 'request-promise-native';
|
|||
import { createHmac } from 'crypto';
|
||||
import { fromBuffer } from 'file-type';
|
||||
import { lookup } from 'mime-types';
|
||||
import {
|
||||
LoggerProxy as Logger,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
const requestPromiseWithDefaults = requestPromise.defaults({
|
||||
timeout: 300000, // 5 minutes
|
||||
|
@ -188,8 +191,12 @@ export async function requestOAuth2(this: IAllExecuteFunctions, credentialsType:
|
|||
};
|
||||
}
|
||||
|
||||
Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" expired. Should revalidate.`);
|
||||
|
||||
const newToken = await token.refresh(tokenRefreshOptions);
|
||||
|
||||
Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" has been renewed.`);
|
||||
|
||||
credentials.oauthTokenData = newToken.data;
|
||||
|
||||
// Find the name of the credentials
|
||||
|
@ -201,6 +208,8 @@ export async function requestOAuth2(this: IAllExecuteFunctions, credentialsType:
|
|||
// Save the refreshed token
|
||||
await additionalData.credentialsHelper.updateCredentials(name, credentialsType, credentials);
|
||||
|
||||
Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" has been saved to database successfully.`);
|
||||
|
||||
// Make the request again with the new token
|
||||
const newRequestOptions = newToken.sign(requestOptions as clientOAuth2.RequestObject);
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ import {
|
|||
ITaskDataConnections,
|
||||
IWaitingForExecution,
|
||||
IWorkflowExecuteAdditionalData,
|
||||
LoggerProxy as Logger,
|
||||
Workflow,
|
||||
WorkflowExecuteMode,
|
||||
WorkflowOperationError,
|
||||
|
@ -482,6 +483,8 @@ export class WorkflowExecute {
|
|||
* @memberof WorkflowExecute
|
||||
*/
|
||||
processRunExecutionData(workflow: Workflow): PCancelable<IRun> {
|
||||
Logger.verbose('Workflow execution started', { workflowId: workflow.id });
|
||||
|
||||
const startedAt = new Date();
|
||||
|
||||
const workflowIssues = workflow.checkReadyForExecution();
|
||||
|
@ -502,7 +505,6 @@ export class WorkflowExecute {
|
|||
this.runExecutionData.startData = {};
|
||||
}
|
||||
|
||||
|
||||
let currentExecutionTry = '';
|
||||
let lastExecutionTry = '';
|
||||
|
||||
|
@ -564,6 +566,7 @@ export class WorkflowExecute {
|
|||
executionData = this.runExecutionData.executionData!.nodeExecutionStack.shift() as IExecuteData;
|
||||
executionNode = executionData.node;
|
||||
|
||||
Logger.debug(`Start processing node "${executionNode.name}"`, { node: executionNode.name, workflowId: workflow.id });
|
||||
await this.executeHook('nodeExecuteBefore', [executionNode.name]);
|
||||
|
||||
// Get the index of the current run
|
||||
|
@ -661,7 +664,9 @@ export class WorkflowExecute {
|
|||
}
|
||||
}
|
||||
|
||||
Logger.debug(`Running node "${executionNode.name}" started`, { node: executionNode.name, workflowId: workflow.id });
|
||||
nodeSuccessData = await workflow.runNode(executionData.node, executionData.data, this.runExecutionData, runIndex, this.additionalData, NodeExecuteFunctions, this.mode);
|
||||
Logger.debug(`Running node "${executionNode.name}" finished successfully`, { node: executionNode.name, workflowId: workflow.id });
|
||||
|
||||
if (nodeSuccessData === undefined) {
|
||||
// Node did not get executed
|
||||
|
@ -698,6 +703,8 @@ export class WorkflowExecute {
|
|||
message: error.message,
|
||||
stack: error.stack,
|
||||
};
|
||||
|
||||
Logger.debug(`Running node "${executionNode.name}" finished with error`, { node: executionNode.name, workflowId: workflow.id });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -829,8 +836,10 @@ export class WorkflowExecute {
|
|||
const fullRunData = this.getFullRunData(startedAt);
|
||||
|
||||
if (executionError !== undefined) {
|
||||
Logger.verbose(`Workflow execution finished with error`, { error: executionError, workflowId: workflow.id });
|
||||
fullRunData.data.resultData.error = executionError;
|
||||
} else {
|
||||
Logger.verbose(`Workflow execution finished successfully`, { workflowId: workflow.id });
|
||||
fullRunData.finished = true;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
|
||||
import {
|
||||
IConnections,
|
||||
ILogger,
|
||||
INode,
|
||||
IRun,
|
||||
LoggerProxy,
|
||||
Workflow,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -1154,9 +1156,19 @@ describe('WorkflowExecute', () => {
|
|||
},
|
||||
];
|
||||
|
||||
const fakeLogger = {
log: () => {},
debug: () => {},
verbose: () => {},
info: () => {},
warn: () => {},
error: () => {},
} as ILogger;
|
||||
|
||||
|
||||
const executionMode = 'manual';
|
||||
const nodeTypes = Helpers.NodeTypes();
|
||||
LoggerProxy.init(fakeLogger);
|
||||
|
||||
for (const testData of tests) {
|
||||
test(testData.description, async () => {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "n8n-editor-ui",
|
||||
"version": "0.87.0",
|
||||
"version": "0.88.0",
|
||||
"description": "Workflow Editor UI for n8n",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
@ -65,7 +65,7 @@
|
|||
"lodash.debounce": "^4.0.8",
|
||||
"lodash.get": "^4.4.2",
|
||||
"lodash.set": "^4.3.2",
|
||||
"n8n-workflow": "~0.56.0",
|
||||
"n8n-workflow": "~0.57.0",
|
||||
"node-sass": "^4.12.0",
|
||||
"normalize-wheel": "^1.0.1",
|
||||
"prismjs": "^1.17.1",
|
||||
|
|
|
@ -47,7 +47,7 @@
|
|||
"@oclif/dev-cli": "^1.22.2",
|
||||
"@types/copyfiles": "^2.1.1",
|
||||
"@types/inquirer": "^6.5.0",
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/tmp": "^0.2.0",
|
||||
"@types/vorpal": "^1.11.0",
|
||||
"tslint": "^6.1.2"
|
||||
},
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
import { ChildProcess, spawn } from 'child_process';
|
||||
const copyfiles = require('copyfiles');
|
||||
|
||||
import {
|
||||
readFile as fsReadFile,
|
||||
} from 'fs/promises';
|
||||
import {
|
||||
write as fsWrite,
|
||||
} from 'fs';
|
||||
|
||||
import { join } from 'path';
|
||||
import { file } from 'tmp-promise';
|
||||
import { promisify } from 'util';
|
||||
|
@ -32,7 +36,7 @@ export async function createCustomTsconfig () {
|
|||
const tsconfigPath = join(__dirname, '../../src/tsconfig-build.json');

// Read the tsconfig file
const tsConfigString = await fsReadFileAsync(tsconfigPath, { encoding: 'utf8'}) as string;
const tsConfigString = await fsReadFile(tsconfigPath, { encoding: 'utf8'}) as string;
const tsConfig = JSON.parse(tsConfigString);
|
||||
|
||||
// Set absolute include paths
|
||||
|
|
|
@ -20,13 +20,51 @@ export class ERPNextApi implements ICredentialType {
|
|||
type: 'string' as NodePropertyTypes,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Environment',
|
||||
name: 'environment',
|
||||
type: 'options' as NodePropertyTypes,
|
||||
default: 'cloudHosted',
|
||||
options: [
|
||||
{
|
||||
name: 'Cloud-hosted',
|
||||
value: 'cloudHosted',
|
||||
},
|
||||
{
|
||||
name: 'Self-hosted',
|
||||
value: 'selfHosted',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Subdomain',
|
||||
name: 'subdomain',
|
||||
type: 'string' as NodePropertyTypes,
|
||||
default: '',
|
||||
placeholder: 'n8n',
|
||||
description: 'ERPNext subdomain. For instance, entering n8n will make the url look like: https://n8n.erpnext.com/.',
|
||||
description: 'Subdomain of cloud-hosted ERPNext instance. For example, "n8n" is the subdomain in: <code>https://n8n.erpnext.com</code>',
|
||||
displayOptions: {
|
||||
show: {
|
||||
environment: [
|
||||
'cloudHosted',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Domain',
|
||||
name: 'domain',
|
||||
type: 'string' as NodePropertyTypes,
|
||||
default: '',
|
||||
placeholder: 'https://www.mydomain.com',
|
||||
description: 'Fully qualified domain name of self-hosted ERPNext instance.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
environment: [
|
||||
'selfHosted',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
|
packages/nodes-base/credentials/KitemakerApi.credentials.ts (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
import {
|
||||
ICredentialType,
|
||||
NodePropertyTypes,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class KitemakerApi implements ICredentialType {
|
||||
name = 'kitemakerApi';
|
||||
displayName = 'Kitemaker API';
|
||||
documentationUrl = 'kitemaker';
|
||||
properties = [
|
||||
{
|
||||
displayName: 'Personal Access Token',
|
||||
name: 'personalAccessToken',
|
||||
type: 'string' as NodePropertyTypes,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
}
|
|
@ -3,15 +3,11 @@ import {
|
|||
NodePropertyTypes,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
||||
export class Mqtt implements ICredentialType {
|
||||
name = 'mqtt';
|
||||
displayName = 'MQTT';
|
||||
documentationUrl = 'mqtt';
|
||||
properties = [
|
||||
// The credentials to get from user and save encrypted.
|
||||
// Properties can be defined exactly in the same way
|
||||
// as node properties.
|
||||
{
|
||||
displayName: 'Protocol',
|
||||
name: 'protocol',
|
||||
|
@ -55,5 +51,19 @@ export class Mqtt implements ICredentialType {
|
|||
},
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Clean Session',
|
||||
name: 'clean',
|
||||
type: 'boolean' as NodePropertyTypes,
|
||||
default: true,
|
||||
description: `Set to false to receive QoS 1 and 2 messages while offline.`,
|
||||
},
|
||||
{
|
||||
displayName: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string' as NodePropertyTypes,
default: '',
description: 'Client ID. If left empty, one is autogenerated for you.',
},
|
||||
];
|
||||
}
|
||||
|
|
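Note: the new Clean Session and Client ID fields map to standard MQTT connection options. Below is a minimal, hedged sketch of how credentials shaped like the ones above could be passed to the mqtt client library; the connectMqtt helper and the MqttCredentials interface are assumptions made for illustration, not part of this change.

import { connect, IClientOptions } from 'mqtt';

// Hypothetical credential shape mirroring the fields defined above.
interface MqttCredentials {
	protocol: string;   // e.g. 'mqtt' or 'mqtts'
	host: string;
	port: number;
	username?: string;
	password?: string;
	clean: boolean;     // "Clean Session": false keeps QoS 1/2 messages queued while offline
	clientId?: string;  // empty means one is autogenerated
}

function connectMqtt(credentials: MqttCredentials) {
	const brokerUrl = `${credentials.protocol}://${credentials.host}:${credentials.port}`;
	const options: IClientOptions = {
		username: credentials.username,
		password: credentials.password,
		clean: credentials.clean,
		// Only pass a client ID when one was provided; otherwise let the client generate it.
		...(credentials.clientId ? { clientId: credentials.clientId } : {}),
	};
	return connect(brokerUrl, options);
}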
|
@ -6,7 +6,9 @@ import {
|
|||
const scopes = [
|
||||
'attachments:write',
|
||||
'channels:remove',
|
||||
'comments:remove',
|
||||
'messages:remove',
|
||||
'threads:remove',
|
||||
'workspaces:read',
|
||||
];
|
||||
|
||||
|
|
|
@ -80,9 +80,9 @@ export class CrateDb implements INodeType {
|
|||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT id, name FROM product WHERE id < 40',
|
||||
placeholder: 'SELECT id, name FROM product WHERE quantity > $1 AND price <= $2',
|
||||
required: true,
|
||||
description: 'The SQL query to execute.',
|
||||
description: 'The SQL query to execute. You can use n8n expressions or $1 and $2 in conjunction with query parameters.',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -235,6 +235,21 @@ export class CrateDb implements INodeType {
|
|||
'See the docs for more examples',
|
||||
].join('<br>'),
|
||||
},
|
||||
{
|
||||
displayName: 'Query Parameters',
|
||||
name: 'queryParams',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': [
|
||||
'executeQuery',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'quantity,price',
|
||||
description: 'Comma separated list of properties which should be used as query parameters.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
|
|
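Note: the Query Parameters option added above pairs item properties with the $1/$2 placeholders in the SQL statement. The following is a small, self-contained sketch of that mapping; buildQueryValues and the example data are illustrative only and independent of the database driver the node actually uses.

// Build positional parameter values from a comma-separated list of item properties,
// e.g. queryParams = 'quantity,price' pairs item.json.quantity with $1 and item.json.price with $2.
function buildQueryValues(queryParams: string, itemJson: Record<string, unknown>): unknown[] {
	return queryParams
		.split(',')
		.map(property => property.trim())
		.filter(property => property.length > 0)
		.map(property => itemJson[property]);
}

// Example (made-up data):
// buildQueryValues('quantity,price', { quantity: 50, price: 9.99 })
// -> [50, 9.99], used with: SELECT id, name FROM product WHERE quantity > $1 AND price <= $2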
@ -15,6 +15,13 @@
|
|||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.dropbox/"
|
||||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -25,7 +25,8 @@ export async function erpNextApiRequest(
|
|||
option: IDataObject = {},
|
||||
) {
|
||||
|
||||
const credentials = await this.getCredentials('erpNextApi');
|
||||
const credentials = await this.getCredentials('erpNextApi') as ERPNextApiCredentials;
|
||||
const baseUrl = getBaseUrl(credentials);
|
||||
|
||||
if (credentials === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'No credentials got returned!');
|
||||
|
@ -40,7 +41,7 @@ export async function erpNextApiRequest(
|
|||
method,
|
||||
body,
|
||||
qs: query,
|
||||
uri: uri || `https://${credentials.subdomain}.erpnext.com${resource}`,
|
||||
uri: uri || `${baseUrl}${resource}`,
|
||||
json: true,
|
||||
};
|
||||
|
||||
|
@ -56,13 +57,12 @@ export async function erpNextApiRequest(
|
|||
try {
|
||||
return await this.helpers.request!(options);
|
||||
} catch (error) {
|
||||
|
||||
if (error.statusCode === 403) {
|
||||
throw new NodeApiError(this.getNode(), { message: `DocType unavailable.` });
|
||||
throw new NodeApiError(this.getNode(), { message: 'DocType unavailable.' });
|
||||
}
|
||||
|
||||
if (error.statusCode === 307) {
|
||||
throw new NodeApiError(this.getNode(), { message:`Please ensure the subdomain is correct.` });
|
||||
throw new NodeApiError(this.getNode(), { message: 'Please ensure the subdomain is correct.' });
|
||||
}
|
||||
|
||||
throw new NodeApiError(this.getNode(), error);
|
||||
|
@ -95,3 +95,19 @@ export async function erpNextApiRequestAllItems(
|
|||
|
||||
return returnData;
|
||||
}
|
||||
|
||||
/**
* Return the base API URL based on the user's environment.
*/
const getBaseUrl = ({ environment, domain, subdomain }: ERPNextApiCredentials) =>
environment === 'cloudHosted'
? `https://${subdomain}.erpnext.com`
: domain;

type ERPNextApiCredentials = {
apiKey: string;
apiSecret: string;
environment: 'cloudHosted' | 'selfHosted';
subdomain?: string;
domain?: string;
};
|
||||
|
|
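Note: with the new Environment field, the request URL is derived from the credentials instead of always assuming a *.erpnext.com subdomain. A short usage sketch of the getBaseUrl helper added above; the credential values are invented for illustration.

const cloudCredentials: ERPNextApiCredentials = {
	apiKey: 'key',
	apiSecret: 'secret',
	environment: 'cloudHosted',
	subdomain: 'n8n',
};

const selfHostedCredentials: ERPNextApiCredentials = {
	apiKey: 'key',
	apiSecret: 'secret',
	environment: 'selfHosted',
	domain: 'https://erp.example.com',
};

// getBaseUrl(cloudCredentials)      -> 'https://n8n.erpnext.com'
// getBaseUrl(selfHostedCredentials) -> 'https://erp.example.com'
// The request URI then becomes `${baseUrl}${resource}`, as shown in erpNextApiRequest above.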
|
@ -15,6 +15,13 @@
|
|||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.eventbriteTrigger/"
|
||||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -1,9 +1,6 @@
|
|||
import {
|
||||
readFile as fsReadFile,
|
||||
} from 'fs';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const fsReadFileAsync = promisify(fsReadFile);
|
||||
} from 'fs/promises';
|
||||
|
||||
import { IExecuteFunctions } from 'n8n-core';
|
||||
import {
|
||||
|
@ -162,7 +159,7 @@ export class ExecuteWorkflow implements INodeType {
|
|||
|
||||
let workflowJson;
|
||||
try {
|
||||
workflowJson = await fsReadFileAsync(workflowPath, { encoding: 'utf8' }) as string;
|
||||
workflowJson = await fsReadFile(workflowPath, { encoding: 'utf8' }) as string;
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
throw new NodeOperationError(this.getNode(), `The file "${workflowPath}" could not be found.`);
|
||||
|
|
|
@ -20,7 +20,7 @@ export class FacebookGraphApi implements INodeType {
|
|||
description: INodeTypeDescription = {
|
||||
displayName: 'Facebook Graph API',
|
||||
name: 'facebookGraphApi',
|
||||
icon: 'file:facebook.png',
|
||||
icon: 'file:facebook.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Interacts with Facebook using the Graph API',
|
||||
|
|
|
@ -29,7 +29,7 @@ export class FacebookTrigger implements INodeType {
|
|||
description: INodeTypeDescription = {
|
||||
displayName: 'Facebook Trigger',
|
||||
name: 'facebookTrigger',
|
||||
icon: 'file:facebook.png',
|
||||
icon: 'file:facebook.svg',
|
||||
group: ['trigger'],
|
||||
version: 1,
|
||||
subtitle: '={{$parameter["appId"] +"/"+ $parameter["object"]}}',
|
||||
|
|
packages/nodes-base/nodes/Facebook/facebook.png (binary file removed, 2.4 KiB)
packages/nodes-base/nodes/Facebook/facebook.svg (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.2.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 60 60" style="enable-background:new 0 0 60 60;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#1877F2;}
|
||||
.st1{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M59.5,30C59.5,13.71,46.29,0.5,30,0.5S0.5,13.71,0.5,30c0,14.72,10.79,26.93,24.89,29.14V38.53H17.9V30h7.49
|
||||
v-6.5c0-7.39,4.4-11.48,11.14-11.48c3.23,0,6.6,0.58,6.6,0.58v7.26h-3.72c-3.66,0-4.81,2.27-4.81,4.61V30h8.18l-1.31,8.53h-6.87
|
||||
v20.61C48.71,56.93,59.5,44.72,59.5,30z"/>
|
||||
<path class="st1" d="M41.48,38.53L42.79,30h-8.18v-5.53c0-2.33,1.14-4.61,4.81-4.61h3.72V12.6c0,0-3.38-0.58-6.6-0.58
|
||||
c-6.74,0-11.14,4.08-11.14,11.48V30H17.9v8.53h7.49v20.61c1.5,0.24,3.04,0.36,4.61,0.36s3.11-0.12,4.61-0.36V38.53H41.48z"/>
|
||||
</g>
|
||||
</svg>
|
|
@ -102,7 +102,7 @@ export class Ftp implements INodeType {
|
|||
{
|
||||
name: 'Delete',
|
||||
value: 'delete',
|
||||
description: 'Delete a file.',
|
||||
description: 'Delete a file/folder.',
|
||||
},
|
||||
{
|
||||
name: 'Download',
|
||||
|
@ -148,6 +148,46 @@ export class Ftp implements INodeType {
|
|||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'delete',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Folder',
|
||||
name: 'folder',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'When set to true, folders can be deleted.',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Recursive',
|
||||
displayOptions: {
|
||||
show: {
|
||||
folder: [
|
||||
true,
|
||||
],
|
||||
},
|
||||
},
|
||||
name: 'recursive',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'If true, remove all files and directories in target directory.',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// download
|
||||
// ----------------------------------
|
||||
|
@ -401,8 +441,13 @@ export class Ftp implements INodeType {
|
|||
|
||||
if (operation === 'delete') {
|
||||
const path = this.getNodeParameter('path', i) as string;
|
||||
const options = this.getNodeParameter('options', i) as IDataObject;
|
||||
|
||||
if (options.folder === true) {
|
||||
responseData = await sftp!.rmdir(path, !!options.recursive);
|
||||
} else {
|
||||
responseData = await sftp!.delete(path);
|
||||
}
|
||||
|
||||
returnItems.push({ json: { success: true } });
|
||||
}
|
||||
|
@ -488,8 +533,13 @@ export class Ftp implements INodeType {
|
|||
|
||||
if (operation === 'delete') {
|
||||
const path = this.getNodeParameter('path', i) as string;
|
||||
const options = this.getNodeParameter('options', i) as IDataObject;
|
||||
|
||||
if (options.folder === true) {
|
||||
responseData = await ftp!.rmdir(path, !!options.recursive);
|
||||
} else {
|
||||
responseData = await ftp!.delete(path);
|
||||
}
|
||||
|
||||
returnItems.push({ json: { success: true } });
|
||||
}
|
||||
|
|
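Note: the Delete operation now takes an Options collection; folder=true switches from a file delete to a directory delete, and recursive controls whether the directory contents are removed too. Below is a minimal sketch of that branching against a generic client; the FtpLikeClient interface and deletePath helper are simplified stand-ins, not the exact types used by the node.

interface FtpLikeClient {
	delete(path: string): Promise<unknown>;
	rmdir(path: string, recursive: boolean): Promise<unknown>;
}

async function deletePath(
	client: FtpLikeClient,
	path: string,
	options: { folder?: boolean; recursive?: boolean } = {},
) {
	// Folders need rmdir; plain files use delete, mirroring the node logic above.
	if (options.folder === true) {
		return client.rmdir(path, options.recursive === true);
	}
	return client.delete(path);
}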
|
@ -180,7 +180,7 @@ export class GetResponseTrigger implements INodeType {
|
|||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.message.includes('[404]')) {
|
||||
if (error.httpCode === '404') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -353,7 +353,7 @@ export class GithubTrigger implements INodeType {
|
|||
try {
|
||||
await githubApiRequest.call(this, 'GET', endpoint, {});
|
||||
} catch (error) {
|
||||
if (error.message.includes('[404]:')) {
|
||||
if (error.httpCode === '404') {
|
||||
// Webhook does not exist
|
||||
delete webhookData.webhookId;
|
||||
delete webhookData.webhookEvents;
|
||||
|
@ -399,7 +399,7 @@ export class GithubTrigger implements INodeType {
|
|||
try {
|
||||
responseData = await githubApiRequest.call(this, 'POST', endpoint, body);
|
||||
} catch (error) {
|
||||
if (error.message.includes('[422]:')) {
|
||||
if (error.httpCode === '422') {
|
||||
// Webhook exists already
|
||||
|
||||
// Get the data of the already registered webhook
|
||||
|
|
|
@ -179,7 +179,7 @@ export class GitlabTrigger implements INodeType {
|
|||
try {
|
||||
await gitlabApiRequest.call(this, 'GET', endpoint, {});
|
||||
} catch (error) {
|
||||
if (error.message.includes('[404]:')) {
|
||||
if (error.httpCode === '404') {
|
||||
// Webhook does not exist
|
||||
delete webhookData.webhookId;
|
||||
delete webhookData.webhookEvents;
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.googleBigQuery",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": [
|
||||
"Data & Storage",
|
||||
"Development"
|
||||
],
|
||||
"resources": {
|
||||
"credentialDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/credentials/google"
|
||||
}
|
||||
],
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.googleBigQuery/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -27,6 +27,11 @@
|
|||
"icon": "🎫",
|
||||
"url": "https://n8n.io/blog/supercharging-your-conference-registration-process-with-n8n/"
|
||||
},
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "5 workflow automations for Mattermost that we love at n8n",
|
||||
"icon": "🤖",
|
||||
|
|
|
@ -180,6 +180,7 @@ export class GoogleCalendar implements INodeType {
|
|||
const resource = this.getNodeParameter('resource', 0) as string;
|
||||
const operation = this.getNodeParameter('operation', 0) as string;
|
||||
for (let i = 0; i < length; i++) {
|
||||
try {
|
||||
if (resource === 'calendar') {
|
||||
//https://developers.google.com/calendar/v3/reference/freebusy/query
|
||||
if (operation === 'availability') {
|
||||
|
@ -605,6 +606,19 @@ export class GoogleCalendar implements INodeType {
|
|||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (this.continueOnFail() !== true) {
|
||||
throw error;
|
||||
} else {
|
||||
// Return the actual reason as error
|
||||
returnData.push(
|
||||
{
|
||||
error: error.message,
|
||||
},
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(responseData)) {
|
||||
returnData.push.apply(returnData, responseData as IDataObject[]);
|
||||
|
|
|
@ -17,6 +17,11 @@
|
|||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "Why this Product Manager loves workflow automation with n8n",
|
||||
"icon": "🧠",
|
||||
|
|
|
@ -274,6 +274,32 @@ export class GoogleDrive implements INodeType {
|
|||
},
|
||||
description: 'Name of the binary property to which to<br />write the data of the read file.',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'download',
|
||||
],
|
||||
resource: [
|
||||
'file',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'File Name',
|
||||
name: 'fileName',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'File name. Ex: data.pdf',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -2011,6 +2037,7 @@ export class GoogleDrive implements INodeType {
|
|||
// ----------------------------------
|
||||
|
||||
const fileId = this.getNodeParameter('fileId', i) as string;
|
||||
const options = this.getNodeParameter('options', i) as IDataObject;
|
||||
|
||||
const requestOptions = {
|
||||
resolveWithFullResponse: true,
|
||||
|
@ -2021,10 +2048,15 @@ export class GoogleDrive implements INodeType {
|
|||
const response = await googleApiRequest.call(this, 'GET', `/drive/v3/files/${fileId}`, {}, { alt: 'media' }, undefined, requestOptions);
|
||||
|
||||
let mimeType: string | undefined;
|
||||
let fileName: string | undefined = undefined;
|
||||
if (response.headers['content-type']) {
|
||||
mimeType = response.headers['content-type'];
|
||||
}
|
||||
|
||||
if (options.fileName) {
|
||||
fileName = options.fileName as string;
|
||||
}
|
||||
|
||||
const newItem: INodeExecutionData = {
|
||||
json: items[i].json,
|
||||
binary: {},
|
||||
|
@ -2043,7 +2075,7 @@ export class GoogleDrive implements INodeType {
|
|||
|
||||
const data = Buffer.from(response.body as string);
|
||||
|
||||
items[i].binary![dataPropertyNameDownload] = await this.helpers.prepareBinaryData(data as unknown as Buffer, undefined, mimeType);
|
||||
items[i].binary![dataPropertyNameDownload] = await this.helpers.prepareBinaryData(data as unknown as Buffer, fileName, mimeType);
|
||||
|
||||
} else if (operation === 'list') {
|
||||
// ----------------------------------
|
||||
|
|
|
@ -209,7 +209,8 @@ export const draftFields = [
|
|||
name: 'property',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'Name of the binary property containing the data to be added to the email as an attachment',
|
||||
description: `Name of the binary property containing the data to be added to the email as an attachment.</br>
|
||||
Multiples can be set separated by comma.`,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -27,6 +27,11 @@
|
|||
"icon": "🎫",
|
||||
"url": "https://n8n.io/blog/supercharging-your-conference-registration-process-with-n8n/"
|
||||
},
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "Using Automation to Boost Productivity in the Workplace",
|
||||
"icon": "💪",
|
||||
|
|
|
@ -325,28 +325,29 @@ export class Gmail implements INodeType {
|
|||
|
||||
if (additionalFields.attachmentsUi) {
|
||||
const attachmentsUi = additionalFields.attachmentsUi as IDataObject;
|
||||
let attachmentsBinary = [];
|
||||
const attachmentsBinary = [];
|
||||
if (!isEmpty(attachmentsUi)) {
|
||||
if (attachmentsUi.hasOwnProperty('attachmentsBinary')
|
||||
&& !isEmpty(attachmentsUi.attachmentsBinary)
|
||||
&& items[i].binary) {
|
||||
// @ts-ignore
|
||||
attachmentsBinary = attachmentsUi.attachmentsBinary.map((value) => {
|
||||
if (items[i].binary!.hasOwnProperty(value.property)) {
|
||||
const aux: IAttachments = { name: '', content: '', type: '' };
|
||||
aux.name = items[i].binary![value.property].fileName || 'unknown';
|
||||
aux.content = items[i].binary![value.property].data;
|
||||
aux.type = items[i].binary![value.property].mimeType;
|
||||
return aux;
|
||||
}
|
||||
for (const { property } of attachmentsUi.attachmentsBinary as IDataObject[]) {
|
||||
for (const binaryProperty of (property as string).split(',')) {
|
||||
if (items[i].binary![binaryProperty] !== undefined) {
|
||||
const binaryData = items[i].binary![binaryProperty];
|
||||
attachmentsBinary.push({
|
||||
name: binaryData.fileName || 'unknown',
|
||||
content: binaryData.data,
|
||||
type: binaryData.mimeType,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
qs = {
|
||||
userId: 'me',
|
||||
uploadType: 'media',
|
||||
};
|
||||
|
||||
attachmentsList = attachmentsBinary;
|
||||
}
|
||||
}
|
||||
|
@ -408,32 +409,32 @@ export class Gmail implements INodeType {
|
|||
|
||||
if (additionalFields.attachmentsUi) {
|
||||
const attachmentsUi = additionalFields.attachmentsUi as IDataObject;
|
||||
let attachmentsBinary = [];
|
||||
const attachmentsBinary = [];
|
||||
if (!isEmpty(attachmentsUi)) {
|
||||
if (attachmentsUi.hasOwnProperty('attachmentsBinary')
|
||||
&& !isEmpty(attachmentsUi.attachmentsBinary)
|
||||
&& items[i].binary) {
|
||||
// @ts-ignore
|
||||
attachmentsBinary = attachmentsUi.attachmentsBinary.map((value) => {
|
||||
if (items[i].binary!.hasOwnProperty(value.property)) {
|
||||
const aux: IAttachments = { name: '', content: '', type: '' };
|
||||
aux.name = items[i].binary![value.property].fileName || 'unknown';
|
||||
aux.content = items[i].binary![value.property].data;
|
||||
aux.type = items[i].binary![value.property].mimeType;
|
||||
return aux;
|
||||
}
|
||||
for (const { property } of attachmentsUi.attachmentsBinary as IDataObject[]) {
|
||||
for (const binaryProperty of (property as string).split(',')) {
|
||||
if (items[i].binary![binaryProperty] !== undefined) {
|
||||
const binaryData = items[i].binary![binaryProperty];
|
||||
attachmentsBinary.push({
|
||||
name: binaryData.fileName || 'unknown',
|
||||
content: binaryData.data,
|
||||
type: binaryData.mimeType,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
qs = {
|
||||
userId: 'me',
|
||||
uploadType: 'media',
|
||||
};
|
||||
|
||||
attachmentsList = attachmentsBinary;
|
||||
}
|
||||
}
|
||||
|
||||
// if no recipient is defined then grab the one who sent the email
|
||||
if (toStr === '') {
|
||||
endpoint = `/gmail/v1/users/me/messages/${id}`;
|
||||
|
@ -628,28 +629,29 @@ export class Gmail implements INodeType {
|
|||
|
||||
if (additionalFields.attachmentsUi) {
|
||||
const attachmentsUi = additionalFields.attachmentsUi as IDataObject;
|
||||
let attachmentsBinary = [];
|
||||
const attachmentsBinary = [];
|
||||
if (!isEmpty(attachmentsUi)) {
|
||||
if (attachmentsUi.hasOwnProperty('attachmentsBinary')
|
||||
&& !isEmpty(attachmentsUi.attachmentsBinary)
|
||||
&& items[i].binary) {
|
||||
// @ts-ignore
|
||||
attachmentsBinary = attachmentsUi.attachmentsBinary.map((value) => {
|
||||
if (items[i].binary!.hasOwnProperty(value.property)) {
|
||||
const aux: IAttachments = { name: '', content: '', type: '' };
|
||||
aux.name = items[i].binary![value.property].fileName || 'unknown';
|
||||
aux.content = items[i].binary![value.property].data;
|
||||
aux.type = items[i].binary![value.property].mimeType;
|
||||
return aux;
|
||||
}
|
||||
for (const { property } of attachmentsUi.attachmentsBinary as IDataObject[]) {
|
||||
for (const binaryProperty of (property as string).split(',')) {
|
||||
if (items[i].binary![binaryProperty] !== undefined) {
|
||||
const binaryData = items[i].binary![binaryProperty];
|
||||
attachmentsBinary.push({
|
||||
name: binaryData.fileName || 'unknown',
|
||||
content: binaryData.data,
|
||||
type: binaryData.mimeType,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
qs = {
|
||||
userId: 'me',
|
||||
uploadType: 'media',
|
||||
};
|
||||
|
||||
attachmentsList = attachmentsBinary;
|
||||
}
|
||||
}
|
||||
|
|
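Note: the attachment handling above was reworked so that a single binary property field can name several properties separated by commas (for example "data,invoice"). A self-contained sketch of that collection step follows; BinaryEntry and Attachment are simplified stand-ins for the node's real types.

interface BinaryEntry {
	fileName?: string;
	data: string;      // base64 content
	mimeType: string;
}

interface Attachment {
	name: string;
	content: string;
	type: string;
}

// Collect attachments from a comma-separated list of binary property names.
function collectAttachments(property: string, binary: Record<string, BinaryEntry>): Attachment[] {
	const attachments: Attachment[] = [];
	for (const binaryProperty of property.split(',')) {
		const entry = binary[binaryProperty];
		if (entry !== undefined) {
			attachments.push({
				name: entry.fileName || 'unknown',
				content: entry.data,
				type: entry.mimeType,
			});
		}
	}
	return attachments;
}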
|
@ -226,30 +226,6 @@ export const messageFields = [
|
|||
},
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'CC Email',
|
||||
name: 'ccList',
|
||||
type: 'string',
|
||||
description: 'The email addresses of the copy recipients.',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
multipleValueButtonText: 'Add CC Email',
|
||||
},
|
||||
placeholder: 'info@example.com',
|
||||
default: [],
|
||||
},
|
||||
{
|
||||
displayName: 'BCC Email',
|
||||
name: 'bccList',
|
||||
type: 'string',
|
||||
description: 'The email addresses of the blind copy recipients.',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
multipleValueButtonText: 'Add BCC Email',
|
||||
},
|
||||
placeholder: 'info@example.com',
|
||||
default: [],
|
||||
},
|
||||
{
|
||||
displayName: 'Attachments',
|
||||
name: 'attachmentsUi',
|
||||
|
@ -268,7 +244,8 @@ export const messageFields = [
|
|||
name: 'property',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'Name of the binary properties which contain data which should be added to email as attachment',
|
||||
description: `Name of the binary property containing the data to be added to the email as an attachment.</br>
|
||||
Multiples can be set separated by comma.`,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -276,6 +253,30 @@ export const messageFields = [
|
|||
default: '',
|
||||
description: 'Array of supported attachments to add to the message.',
|
||||
},
|
||||
{
|
||||
displayName: 'BCC Email',
|
||||
name: 'bccList',
|
||||
type: 'string',
|
||||
description: 'The email addresses of the blind copy recipients.',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
multipleValueButtonText: 'Add BCC Email',
|
||||
},
|
||||
placeholder: 'info@example.com',
|
||||
default: [],
|
||||
},
|
||||
{
|
||||
displayName: 'CC Email',
|
||||
name: 'ccList',
|
||||
type: 'string',
|
||||
description: 'The email addresses of the copy recipients.',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
multipleValueButtonText: 'Add CC Email',
|
||||
},
|
||||
placeholder: 'info@example.com',
|
||||
default: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
|
|
@ -415,6 +415,12 @@ export class GoogleSheet {
|
|||
for (let i = 0; i < keys.length; i++) {
inputData[rowIndex][i] = '';
}
} else if (inputData[rowIndex].length < keys.length) {
for (let i = 0; i < keys.length; i++) {
if (inputData[rowIndex][i] === undefined) {
inputData[rowIndex].push('');
}
}
}
}
// Loop over all the lookup values and try to find a row to return
|
||||
|
|
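Note: the change above pads short rows with empty strings so that every row has one cell per column key before lookups run. A tiny standalone illustration of that padding; the row and keys values are invented for the example.

// Pad a row in place so that row.length === keys.length.
function padRow(row: string[], keys: string[]): string[] {
	for (let i = 0; i < keys.length; i++) {
		if (row[i] === undefined) {
			row.push('');
		}
	}
	return row;
}

// padRow(['a', 'b'], ['name', 'email', 'city']) -> ['a', 'b', '']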
|
@ -48,6 +48,11 @@
|
|||
"icon": "📈",
|
||||
"url": "https://n8n.io/blog/migrating-community-metrics-to-orbit-using-n8n/"
|
||||
},
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "How Honest Burgers Use Automation to Save $100k per year",
|
||||
"icon": "🍔",
|
||||
|
|
|
@ -17,6 +17,11 @@
|
|||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "Benefits of automation and n8n: An interview with HubSpot's Hugh Durkin",
|
||||
"icon": "🎖",
|
||||
|
|
packages/nodes-base/nodes/Kitemaker/GenericFunctions.ts (new file, 85 lines)
|
@ -0,0 +1,85 @@
|
|||
import {
|
||||
IExecuteFunctions,
|
||||
ILoadOptionsFunctions,
|
||||
} from 'n8n-core';
|
||||
|
||||
import {
|
||||
IDataObject,
|
||||
IHookFunctions,
|
||||
NodeApiError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export async function kitemakerRequest(
|
||||
this: IExecuteFunctions | ILoadOptionsFunctions | IHookFunctions,
|
||||
body: IDataObject = {},
|
||||
) {
|
||||
const { personalAccessToken } = this.getCredentials('kitemakerApi') as { personalAccessToken: string };
|
||||
|
||||
const options = {
|
||||
headers: {
|
||||
Authorization: `Bearer ${personalAccessToken}`,
|
||||
},
|
||||
method: 'POST',
|
||||
body,
|
||||
uri: 'https://toil.kitemaker.co/developers/graphql',
|
||||
json: true,
|
||||
};
|
||||
|
||||
const responseData = await this.helpers.request!.call(this, options);
|
||||
|
||||
if (responseData.errors) {
|
||||
throw new NodeApiError(this.getNode(), responseData);
|
||||
}
|
||||
|
||||
return responseData;
|
||||
}
|
||||
|
||||
export async function kitemakerRequestAllItems(
|
||||
this: IHookFunctions | IExecuteFunctions | ILoadOptionsFunctions,
|
||||
body: { query: string; variables: { [key: string]: string } },
|
||||
) {
|
||||
const resource = this.getNodeParameter('resource', 0) as 'space' | 'user' | 'workItem';
|
||||
const [group, items] = getGroupAndItems(resource);
|
||||
|
||||
const returnAll = this.getNodeParameter('returnAll', 0, false) as boolean;
|
||||
const limit = this.getNodeParameter('limit', 0, 0) as number;
|
||||
|
||||
const returnData: IDataObject[] = [];
|
||||
let responseData;
|
||||
|
||||
do {
|
||||
responseData = await kitemakerRequest.call(this, body);
|
||||
body.variables.cursor = responseData.data[group].cursor;
|
||||
returnData.push(...responseData.data[group][items]);
|
||||
|
||||
if (!returnAll && returnData.length > limit) {
|
||||
return returnData.slice(0, limit);
|
||||
}
|
||||
|
||||
} while (responseData.data[group].hasMore);
|
||||
|
||||
return returnData;
|
||||
}
|
||||
|
||||
function getGroupAndItems(resource: 'space' | 'user' | 'workItem') {
|
||||
const map: { [key: string]: { [key: string]: string } } = {
|
||||
space: { group: 'organization', items: 'spaces' },
|
||||
user: { group: 'organization', items: 'users' },
|
||||
workItem: { group: 'workItems', items: 'workItems' },
|
||||
};
|
||||
|
||||
return [
|
||||
map[resource]['group'],
|
||||
map[resource]['items'],
|
||||
];
|
||||
}
|
||||
|
||||
export function createLoadOptions(
|
||||
resources: Array<{ name?: string; username?: string; title?: string; id: string }>,
|
||||
): Array<{ name: string; value: string }> {
|
||||
return resources.map(option => {
|
||||
if (option.username) return ({ name: option.username, value: option.id });
|
||||
if (option.title) return ({ name: option.title, value: option.id });
|
||||
return ({ name: option.name ?? 'Unnamed', value: option.id });
|
||||
});
|
||||
}
|
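Note: GenericFunctions.ts above authenticates every call with a personal access token against Kitemaker's GraphQL endpoint and pages through results with a cursor until hasMore is false. Below is a simplified sketch of that cursor loop; fetchPage and Page are assumptions standing in for kitemakerRequest and the GraphQL response shape.

interface Page<T> {
	items: T[];
	cursor?: string;
	hasMore: boolean;
}

// fetchPage stands in for kitemakerRequest: it runs one GraphQL query with the given cursor.
async function collectAll<T>(
	fetchPage: (cursor?: string) => Promise<Page<T>>,
	limit?: number,
): Promise<T[]> {
	const collected: T[] = [];
	let cursor: string | undefined;
	let page: Page<T>;

	do {
		page = await fetchPage(cursor);
		cursor = page.cursor;
		collected.push(...page.items);

		// Stop early once the requested limit is reached (mirrors the returnAll/limit handling above).
		if (limit !== undefined && collected.length > limit) {
			return collected.slice(0, limit);
		}
	} while (page.hasMore);

	return collected;
}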
packages/nodes-base/nodes/Kitemaker/Kitemaker.node.ts (new file, 321 lines)
|
@ -0,0 +1,321 @@
|
|||
import {
|
||||
IExecuteFunctions
|
||||
} from 'n8n-core';
|
||||
|
||||
import {
|
||||
IDataObject,
|
||||
ILoadOptionsFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
organizationOperations,
|
||||
spaceFields,
|
||||
spaceOperations,
|
||||
userFields,
|
||||
userOperations,
|
||||
workItemFields,
|
||||
workItemOperations,
|
||||
} from './descriptions';
|
||||
|
||||
import {
|
||||
createLoadOptions,
|
||||
kitemakerRequest,
|
||||
kitemakerRequestAllItems,
|
||||
} from './GenericFunctions';
|
||||
|
||||
import {
|
||||
getAllSpaces,
|
||||
getAllUsers,
|
||||
getAllWorkItems,
|
||||
getLabels,
|
||||
getOrganization,
|
||||
getSpaces,
|
||||
getStatuses,
|
||||
getUsers,
|
||||
getWorkItem,
|
||||
getWorkItems,
|
||||
} from './queries';
|
||||
|
||||
import {
|
||||
createWorkItem,
|
||||
editWorkItem,
|
||||
} from './mutations';
|
||||
|
||||
export class Kitemaker implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Kitemaker',
|
||||
name: 'kitemaker',
|
||||
icon: 'file:kitemaker.svg',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
subtitle: '={{$parameter["resource"] + ": " + $parameter["operation"]}}',
|
||||
description: 'Consume the Kitemaker GraphQL API',
|
||||
defaults: {
|
||||
name: 'Kitemaker',
|
||||
color: '#662482',
|
||||
},
|
||||
inputs: ['main'],
|
||||
outputs: ['main'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'kitemakerApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Resource',
|
||||
name: 'resource',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Organization',
|
||||
value: 'organization',
|
||||
},
|
||||
{
|
||||
name: 'Space',
|
||||
value: 'space',
|
||||
},
|
||||
{
|
||||
name: 'User',
|
||||
value: 'user',
|
||||
},
|
||||
{
|
||||
name: 'Work Item',
|
||||
value: 'workItem',
|
||||
},
|
||||
],
|
||||
default: 'workItem',
|
||||
required: true,
|
||||
description: 'Resource to operate on.',
|
||||
},
|
||||
...organizationOperations,
|
||||
...spaceOperations,
|
||||
...spaceFields,
|
||||
...userOperations,
|
||||
...userFields,
|
||||
...workItemOperations,
|
||||
...workItemFields,
|
||||
],
|
||||
};
|
||||
|
||||
methods = {
|
||||
loadOptions: {
|
||||
async getLabels(this: ILoadOptionsFunctions) {
|
||||
const responseData = await kitemakerRequest.call(this, { query: getLabels });
|
||||
const { data: { organization: { spaces } } } = responseData;
|
||||
|
||||
return createLoadOptions(spaces[0].labels);
|
||||
},
|
||||
|
||||
async getSpaces(this: ILoadOptionsFunctions) {
|
||||
const responseData = await kitemakerRequest.call(this, { query: getSpaces });
|
||||
const { data: { organization: { spaces } } } = responseData;
|
||||
|
||||
return createLoadOptions(spaces);
|
||||
},
|
||||
|
||||
async getStatuses(this: ILoadOptionsFunctions) {
|
||||
const responseData = await kitemakerRequest.call(this, { query: getStatuses });
|
||||
const { data: { organization: { spaces } } } = responseData;
|
||||
|
||||
return createLoadOptions(spaces[0].statuses);
|
||||
},
|
||||
|
||||
async getUsers(this: ILoadOptionsFunctions) {
|
||||
const responseData = await kitemakerRequest.call(this, { query: getUsers });
|
||||
const { data: { organization: { users } } } = responseData;
|
||||
|
||||
return createLoadOptions(users);
|
||||
},
|
||||
|
||||
async getWorkItems(this: ILoadOptionsFunctions) {
|
||||
const spaceId = this.getNodeParameter('spaceId', 0) as string;
|
||||
|
||||
const responseData = await kitemakerRequest.call(this, {
|
||||
query: getWorkItems,
|
||||
variables: { spaceId },
|
||||
});
|
||||
|
||||
const { data: { workItems: { workItems } } } = responseData;
|
||||
|
||||
return createLoadOptions(workItems);
|
||||
},
|
||||
|
||||
},
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
const items = this.getInputData();
|
||||
|
||||
const resource = this.getNodeParameter('resource', 0);
|
||||
const operation = this.getNodeParameter('operation', 0);
|
||||
|
||||
let responseData;
|
||||
const returnData: IDataObject[] = [];
|
||||
|
||||
// https://github.com/kitemakerhq/docs/blob/main/kitemaker.graphql
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
|
||||
if (resource === 'organization') {
|
||||
|
||||
// *********************************************************************
|
||||
// organization
|
||||
// *********************************************************************
|
||||
|
||||
if (operation === 'get') {
|
||||
|
||||
// ----------------------------------
|
||||
// organization: get
|
||||
// ----------------------------------
|
||||
|
||||
responseData = await kitemakerRequest.call(this, {
|
||||
query: getOrganization,
|
||||
});
|
||||
|
||||
returnData.push(responseData.data.organization);
|
||||
|
||||
}
|
||||
|
||||
} else if (resource === 'space') {
|
||||
|
||||
// *********************************************************************
|
||||
// space
|
||||
// *********************************************************************
|
||||
|
||||
if (operation === 'getAll') {
|
||||
|
||||
// ----------------------------------
|
||||
// space: getAll
|
||||
// ----------------------------------
|
||||
|
||||
const allItems = await kitemakerRequestAllItems.call(this, {
|
||||
query: getAllSpaces,
|
||||
variables: {},
|
||||
});
|
||||
|
||||
returnData.push(...allItems);
|
||||
|
||||
}
|
||||
|
||||
} else if (resource === 'user') {
|
||||
|
||||
// *********************************************************************
|
||||
// user
|
||||
// *********************************************************************
|
||||
|
||||
if (operation === 'getAll') {
|
||||
|
||||
// ----------------------------------
|
||||
// user: getAll
|
||||
// ----------------------------------
|
||||
|
||||
const allItems = await kitemakerRequestAllItems.call(this, {
|
||||
query: getAllUsers,
|
||||
variables: {},
|
||||
});
|
||||
|
||||
returnData.push(...allItems);
|
||||
|
||||
}
|
||||
|
||||
} else if (resource === 'workItem') {
|
||||
|
||||
// *********************************************************************
|
||||
// workItem
|
||||
// *********************************************************************
|
||||
|
||||
if (operation === 'create') {
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: create
|
||||
// ----------------------------------
|
||||
|
||||
const input = {
|
||||
title: this.getNodeParameter('title', i) as string,
|
||||
statusId: this.getNodeParameter('statusId', i) as string[],
|
||||
};
|
||||
|
||||
if (!input.statusId.length) {
|
||||
throw new Error('Please enter a status to set for the work item to create.');
|
||||
}
|
||||
|
||||
const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject;
|
||||
|
||||
if (Object.keys(additionalFields).length) {
|
||||
Object.assign(input, additionalFields);
|
||||
}
|
||||
|
||||
responseData = await kitemakerRequest.call(this, {
|
||||
query: createWorkItem,
|
||||
variables: { input },
|
||||
});
|
||||
|
||||
returnData.push(responseData.data.createWorkItem.workItem);
|
||||
|
||||
} else if (operation === 'get') {
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: get
|
||||
// ----------------------------------
|
||||
|
||||
const workItemId = this.getNodeParameter('workItemId', i) as string;
|
||||
|
||||
responseData = await kitemakerRequest.call(this, {
|
||||
query: getWorkItem,
|
||||
variables: { workItemId },
|
||||
});
|
||||
|
||||
returnData.push(responseData.data.workItem);
|
||||
|
||||
} else if (operation === 'getAll') {
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: getAll
|
||||
// ----------------------------------
|
||||
|
||||
const allItems = await kitemakerRequestAllItems.call(this, {
|
||||
query: getAllWorkItems,
|
||||
variables: {
|
||||
spaceId: this.getNodeParameter('spaceId', i) as string,
|
||||
},
|
||||
});
|
||||
|
||||
returnData.push(...allItems);
|
||||
|
||||
} else if (operation === 'update') {
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: update
|
||||
// ----------------------------------
|
||||
|
||||
const input = {
|
||||
id: this.getNodeParameter('workItemId', i),
|
||||
};
|
||||
|
||||
const updateFields = this.getNodeParameter('updateFields', i) as IDataObject;
|
||||
|
||||
if (!Object.keys(updateFields).length) {
|
||||
throw new Error('Please enter at least one field to update for the work item.');
|
||||
}
|
||||
|
||||
Object.assign(input, updateFields);
|
||||
|
||||
responseData = await kitemakerRequest.call(this, {
|
||||
query: editWorkItem,
|
||||
variables: { input },
|
||||
});
|
||||
|
||||
returnData.push(responseData.data.editWorkItem.workItem);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return [this.helpers.returnJsonArray(returnData)];
|
||||
}
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
import {
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export const organizationOperations = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
default: 'get',
|
||||
description: 'Operation to perform.',
|
||||
options: [
|
||||
{
|
||||
name: 'Get',
|
||||
value: 'get',
|
||||
description: 'Retrieve data on the logged-in user\'s organization.',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'organization',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
|
@ -0,0 +1,71 @@
|
|||
import {
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export const spaceOperations = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
default: 'getAll',
|
||||
description: 'Operation to perform.',
|
||||
options: [
|
||||
{
|
||||
name: 'Get All',
|
||||
value: 'getAll',
|
||||
description: 'Retrieve data on all the spaces in the<br>logged-in user\'s organization.',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'space',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
||||
export const spaceFields = [
|
||||
{
|
||||
displayName: 'Return All',
|
||||
name: 'returnAll',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Return all results.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'space',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Limit',
|
||||
name: 'limit',
|
||||
type: 'number',
|
||||
default: 5,
|
||||
description: 'The number of results to return.',
|
||||
typeOptions: {
|
||||
minValue: 1,
|
||||
maxValue: 1000,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'space',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
returnAll: [
|
||||
false,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
|
@ -0,0 +1,71 @@
|
|||
import {
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export const userOperations = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
default: 'getAll',
|
||||
description: 'Operation to perform.',
|
||||
options: [
|
||||
{
|
||||
name: 'Get All',
|
||||
value: 'getAll',
|
||||
description: 'Retrieve data on all the users in the<br>logged-in user\'s organization.',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'user',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
||||
export const userFields = [
|
||||
{
|
||||
displayName: 'Return All',
|
||||
name: 'returnAll',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Return all results.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'user',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Limit',
|
||||
name: 'limit',
|
||||
type: 'number',
|
||||
default: 5,
|
||||
description: 'The number of results to return.',
|
||||
typeOptions: {
|
||||
minValue: 1,
|
||||
maxValue: 1000,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'user',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
returnAll: [
|
||||
false,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
|
@ -0,0 +1,372 @@
|
|||
import {
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export const workItemOperations = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
default: 'get',
|
||||
description: 'Operation to perform.',
|
||||
options: [
|
||||
{
|
||||
name: 'Create',
|
||||
value: 'create',
|
||||
},
|
||||
{
|
||||
name: 'Get',
|
||||
value: 'get',
|
||||
},
|
||||
{
|
||||
name: 'Get All',
|
||||
value: 'getAll',
|
||||
},
|
||||
{
|
||||
name: 'Update',
|
||||
value: 'update',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
||||
export const workItemFields = [
|
||||
// ----------------------------------
|
||||
// workItem: create
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Title',
|
||||
name: 'title',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'Title of the work item to create.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Status ID',
|
||||
name: 'statusId',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getStatuses',
|
||||
},
|
||||
default: [],
|
||||
required: true,
|
||||
description: 'ID of the status to set on the item to create.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
name: 'additionalFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
typeOptions: {
|
||||
alwaysOpenEditWindow: true,
|
||||
},
|
||||
description: 'Description of the item to create. Markdown supported.',
|
||||
},
|
||||
{
|
||||
displayName: 'Effort',
|
||||
name: 'effort',
|
||||
type: 'options',
|
||||
default: 'SMALL',
|
||||
description: 'Effort to set for the item to create.',
|
||||
options: [
|
||||
{
|
||||
name: 'Small',
|
||||
value: 'SMALL',
|
||||
},
|
||||
{
|
||||
name: 'Medium',
|
||||
value: 'MEDIUM',
|
||||
},
|
||||
{
|
||||
name: 'Large',
|
||||
value: 'LARGE',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Impact',
|
||||
name: 'impact',
|
||||
type: 'options',
|
||||
default: 'SMALL',
|
||||
description: 'Impact to set for the item to create.',
|
||||
options: [
|
||||
{
|
||||
name: 'Small',
|
||||
value: 'SMALL',
|
||||
},
|
||||
{
|
||||
name: 'Medium',
|
||||
value: 'MEDIUM',
|
||||
},
|
||||
{
|
||||
name: 'Large',
|
||||
value: 'LARGE',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Label IDs',
|
||||
name: 'labelIds',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getLabels',
|
||||
},
|
||||
default: [],
|
||||
description: 'IDs of the labels to add to the item to create.',
|
||||
},
|
||||
{
|
||||
displayName: 'Member IDs',
|
||||
name: 'memberIds',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getUsers',
|
||||
},
|
||||
default: [],
|
||||
description: 'IDs of the users to assign to the item to create.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: get
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Work Item ID',
|
||||
name: 'workItemId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'ID of the work item to retrieve.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'get',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: getAll
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Space ID',
|
||||
name: 'spaceId',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getSpaces',
|
||||
},
|
||||
default: [],
|
||||
required: true,
|
||||
description: 'ID of the space to retrieve the work items from.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Return All',
|
||||
name: 'returnAll',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Return all results.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Limit',
|
||||
name: 'limit',
|
||||
type: 'number',
|
||||
default: 5,
|
||||
description: 'The number of results to return.',
|
||||
typeOptions: {
|
||||
minValue: 1,
|
||||
maxValue: 1000,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
returnAll: [
|
||||
false,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// workItem: update
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Work Item ID',
|
||||
name: 'workItemId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'ID of the work item to update.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Update Fields',
|
||||
name: 'updateFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'workItem',
|
||||
],
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
typeOptions: {
|
||||
alwaysOpenEditWindow: true,
|
||||
},
|
||||
description: 'Description of the item to update. Markdown supported.',
|
||||
},
|
||||
{
|
||||
displayName: 'Effort',
|
||||
name: 'effort',
|
||||
type: 'options',
|
||||
default: 'SMALL',
|
||||
description: 'Effort to set for the item to update.',
|
||||
options: [
|
||||
{
|
||||
name: 'Small',
|
||||
value: 'SMALL',
|
||||
},
|
||||
{
|
||||
name: 'Medium',
|
||||
value: 'MEDIUM',
|
||||
},
|
||||
{
|
||||
name: 'Large',
|
||||
value: 'LARGE',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Impact',
|
||||
name: 'impact',
|
||||
type: 'options',
|
||||
default: 'SMALL',
|
||||
description: 'Impact to set for the item to update.',
|
||||
options: [
|
||||
{
|
||||
name: 'Small',
|
||||
value: 'SMALL',
|
||||
},
|
||||
{
|
||||
name: 'Medium',
|
||||
value: 'MEDIUM',
|
||||
},
|
||||
{
|
||||
name: 'Large',
|
||||
value: 'LARGE',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Status ID',
|
||||
name: 'statusId',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getStatuses',
|
||||
},
|
||||
default: [],
|
||||
description: 'ID of the status to set on the item to update.',
|
||||
},
|
||||
{
|
||||
displayName: 'Title',
|
||||
name: 'title',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'Title to set for the work item to update.',
|
||||
},
|
||||
],
|
||||
},
|
||||
] as INodeProperties[];
|
|
@ -0,0 +1,4 @@
|
|||
export * from './OrganizationDescription';
|
||||
export * from './SpaceDescription';
|
||||
export * from './UserDescription';
|
||||
export * from './WorkItemDescription';
|
18
packages/nodes-base/nodes/Kitemaker/kitemaker.svg
Normal file
|
@ -0,0 +1,18 @@
|
|||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="170.000000pt" height="170.000000pt" viewBox="-30 -25 220.000000 220.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
|
||||
<g transform="translate(0.000000,170.000000) scale(0.100000,-0.100000)"
|
||||
fill="#000000" stroke="none">
|
||||
<path fill="#662482" d="M1065 1445 c-302 -140 -560 -254 -572 -254 -17 -1 -61 50 -200 229
|
||||
-178 229 -206 256 -242 233 -14 -9 -16 -89 -19 -793 -1 -570 1 -786 9 -796 35
|
||||
-43 33 -46 852 774 558 558 787 794 787 809 0 26 -25 53 -48 52 -9 0 -264
|
||||
-115 -567 -254z"/>
|
||||
<path fill="#e61b73" d="M694 448 c-133 -134 -244 -251 -247 -260 -3 -9 2 -26 11 -38 16 -18
|
||||
67 -26 599 -85 320 -35 589 -62 597 -59 19 7 29 38 21 62 -3 9 -159 152 -346
|
||||
317 -241 212 -348 301 -367 303 -23 2 -59 -30 -268 -240z"/>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 922 B |
69
packages/nodes-base/nodes/Kitemaker/mutations.ts
Normal file
|
@ -0,0 +1,69 @@
|
|||
// ----------------------------------
|
||||
// mutations
|
||||
// ----------------------------------
|
||||
|
||||
export const createWorkItem = `
|
||||
mutation($input: CreateWorkItemInput!) {
|
||||
createWorkItem(input: $input) {
|
||||
workItem {
|
||||
id
|
||||
number
|
||||
title
|
||||
description
|
||||
status {
|
||||
id
|
||||
name
|
||||
}
|
||||
members {
|
||||
id
|
||||
username
|
||||
}
|
||||
watchers {
|
||||
id
|
||||
username
|
||||
}
|
||||
labels {
|
||||
id
|
||||
name
|
||||
}
|
||||
effort
|
||||
impact
|
||||
updatedAt
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const editWorkItem = `
|
||||
mutation ($input: EditWorkItemInput!) {
|
||||
editWorkItem(input: $input) {
|
||||
workItem {
|
||||
id
|
||||
number
|
||||
title
|
||||
description
|
||||
status {
|
||||
id
|
||||
name
|
||||
}
|
||||
members {
|
||||
id
|
||||
username
|
||||
}
|
||||
watchers {
|
||||
id
|
||||
username
|
||||
}
|
||||
labels {
|
||||
id
|
||||
name
|
||||
}
|
||||
effort
|
||||
impact
|
||||
updatedAt
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
199
packages/nodes-base/nodes/Kitemaker/queries.ts
Normal file
|
@ -0,0 +1,199 @@
|
|||
// ----------------------------------
|
||||
// queries
|
||||
// ----------------------------------
|
||||
|
||||
export const getAllSpaces = `
|
||||
query {
|
||||
organization {
|
||||
spaces {
|
||||
id
|
||||
name
|
||||
labels {
|
||||
id
|
||||
name
|
||||
color
|
||||
}
|
||||
statuses {
|
||||
id
|
||||
name
|
||||
type
|
||||
default
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getAllUsers = `
|
||||
query {
|
||||
organization {
|
||||
users {
|
||||
id
|
||||
username
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getLabels = `
|
||||
query {
|
||||
organization {
|
||||
spaces {
|
||||
labels {
|
||||
id
|
||||
name
|
||||
color
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getOrganization = `
|
||||
query {
|
||||
organization {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getSpaces = `
|
||||
query {
|
||||
organization {
|
||||
spaces {
|
||||
id
|
||||
name
|
||||
labels {
|
||||
id
|
||||
name
|
||||
color
|
||||
}
|
||||
statuses {
|
||||
id
|
||||
name
|
||||
type
|
||||
default
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getStatuses = `
|
||||
query {
|
||||
organization {
|
||||
spaces {
|
||||
statuses {
|
||||
id
|
||||
name
|
||||
type
|
||||
default
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getUsers = `
|
||||
query {
|
||||
organization {
|
||||
users {
|
||||
id
|
||||
username
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getWorkItems = `
|
||||
query($spaceId: ID!) {
|
||||
workItems(spaceId: $spaceId) {
|
||||
workItems {
|
||||
id
|
||||
title
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getWorkItem = `
|
||||
query($workItemId: ID!) {
|
||||
workItem(id: $workItemId) {
|
||||
id
|
||||
number
|
||||
title
|
||||
description
|
||||
status {
|
||||
id
|
||||
name
|
||||
}
|
||||
sort
|
||||
members {
|
||||
id
|
||||
username
|
||||
}
|
||||
watchers {
|
||||
id
|
||||
username
|
||||
}
|
||||
labels {
|
||||
id
|
||||
name
|
||||
}
|
||||
comments {
|
||||
id
|
||||
actor {
|
||||
__typename
|
||||
}
|
||||
body
|
||||
threadId
|
||||
updatedAt
|
||||
createdAt
|
||||
}
|
||||
effort
|
||||
impact
|
||||
updatedAt
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const getAllWorkItems = `
|
||||
query($spaceId: ID!, $cursor: String) {
|
||||
workItems(spaceId: $spaceId, cursor: $cursor) {
|
||||
hasMore,
|
||||
cursor,
|
||||
workItems {
|
||||
id
|
||||
title
|
||||
description
|
||||
labels {
|
||||
id
|
||||
}
|
||||
comments {
|
||||
id
|
||||
body
|
||||
actor {
|
||||
... on User {
|
||||
id
|
||||
username
|
||||
}
|
||||
... on IntegrationUser {
|
||||
id
|
||||
externalName
|
||||
}
|
||||
... on Integration {
|
||||
id
|
||||
type
|
||||
}
|
||||
... on Application {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
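Note on the getAllWorkItems query above: it returns hasMore and cursor alongside the workItems, which is what kitemakerRequestAllItems (defined in the node's GenericFunctions, not shown in this diff) relies on for pagination. A minimal sketch of such a loop, assuming a kitemakerRequest helper that sends the GraphQL body and returns the parsed response:

// Sketch only; the shipped helper in GenericFunctions.ts may differ in its details.
import { IExecuteFunctions } from 'n8n-core';
import { IDataObject } from 'n8n-workflow';

async function requestAllWorkItems(this: IExecuteFunctions, query: string, variables: IDataObject): Promise<IDataObject[]> {
	const returnData: IDataObject[] = [];
	let hasMore = true;
	while (hasMore) {
		// kitemakerRequest is the assumed low-level request helper used elsewhere in the node.
		const response = await kitemakerRequest.call(this, { query, variables });
		const { workItems, cursor } = response.data.workItems;
		returnData.push(...workItems);
		hasMore = response.data.workItems.hasMore;
		variables.cursor = cursor; // feed the cursor back into the next request
	}
	return returnData;
}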
21
packages/nodes-base/nodes/MQTT/Mqtt.node.json
Normal file
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.mqtt",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": [
|
||||
"Communication",
|
||||
"Development"
|
||||
],
|
||||
"resources": {
|
||||
"credentialDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/credentials/mqtt"
|
||||
}
|
||||
],
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.mqtt/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
171
packages/nodes-base/nodes/MQTT/Mqtt.node.ts
Normal file
|
@ -0,0 +1,171 @@
|
|||
import {
|
||||
IExecuteFunctions,
|
||||
} from 'n8n-core';
|
||||
|
||||
import {
|
||||
IDataObject,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import * as mqtt from 'mqtt';
|
||||
|
||||
import {
|
||||
IClientOptions,
|
||||
} from 'mqtt';
|
||||
|
||||
export class Mqtt implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'MQTT',
|
||||
name: 'mqtt',
|
||||
icon: 'file:mqtt.svg',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
description: 'Push messages to MQTT',
|
||||
defaults: {
|
||||
name: 'MQTT',
|
||||
color: '#9b27af',
|
||||
},
|
||||
inputs: ['main'],
|
||||
outputs: ['main'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'mqtt',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Topic',
|
||||
name: 'topic',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
description: `The topic to publish to`,
|
||||
},
|
||||
{
|
||||
displayName: 'Send Input Data',
|
||||
name: 'sendInputData',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description: 'Send the data the node receives as JSON.',
|
||||
},
|
||||
{
|
||||
displayName: 'Message',
|
||||
name: 'message',
|
||||
type: 'string',
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
sendInputData: [
|
||||
false,
|
||||
],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
description: 'The message to publish',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'QoS',
|
||||
name: 'qos',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Received at Most Once',
|
||||
value: 0,
|
||||
},
|
||||
{
|
||||
name: 'Received at Least Once',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
name: 'Exactly Once',
|
||||
value: 2,
|
||||
},
|
||||
],
|
||||
default: 0,
|
||||
description: 'The QoS level to publish the message with.',
|
||||
},
|
||||
{
|
||||
displayName: 'Retain',
|
||||
name: 'retain',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: `Normally, if a publisher publishes a message to a topic and no one is subscribed to<br>
|
||||
that topic, the message is simply discarded by the broker. However, the publisher can tell the broker<br>
|
||||
to keep the last message on that topic by setting the retain flag to true.`,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
const items = this.getInputData();
|
||||
const length = (items.length as unknown) as number;
|
||||
const credentials = this.getCredentials('mqtt') as IDataObject;
|
||||
|
||||
const protocol = credentials.protocol as string || 'mqtt';
|
||||
const host = credentials.host as string;
|
||||
const brokerUrl = `${protocol}://${host}`;
|
||||
const port = credentials.port as number || 1883;
|
||||
const clientId = credentials.clientId as string || `mqttjs_${Math.random().toString(16).substr(2, 8)}`;
|
||||
const clean = credentials.clean as boolean;
|
||||
|
||||
const clientOptions: IClientOptions = {
|
||||
port,
|
||||
clean,
|
||||
clientId,
|
||||
};
|
||||
|
||||
if (credentials.username && credentials.password) {
|
||||
clientOptions.username = credentials.username as string;
|
||||
clientOptions.password = credentials.password as string;
|
||||
}
|
||||
|
||||
const client = mqtt.connect(brokerUrl, clientOptions);
|
||||
const sendInputData = this.getNodeParameter('sendInputData', 0) as boolean;
|
||||
|
||||
// tslint:disable-next-line: no-any
|
||||
const data = await new Promise((resolve, reject): any => {
|
||||
client.on('connect', () => {
|
||||
for (let i = 0; i < length; i++) {
|
||||
|
||||
let message;
|
||||
const topic = (this.getNodeParameter('topic', i) as string);
|
||||
const options = (this.getNodeParameter('options', i) as IDataObject);
|
||||
|
||||
try {
|
||||
if (sendInputData === true) {
|
||||
message = JSON.stringify(items[i].json);
|
||||
} else {
|
||||
message = this.getNodeParameter('message', i) as string;
|
||||
}
|
||||
client.publish(topic, message, options);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
}
|
||||
// Wait for the in-flight messages to be acked.
|
||||
// Needed for messages with QoS 1 and 2.
|
||||
client.end(false, {}, () => {
|
||||
resolve([items]);
|
||||
});
|
||||
|
||||
client.on('error', (e: string | undefined) => {
|
||||
reject(e);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return data as INodeExecutionData[][];
|
||||
}
|
||||
}
|
|
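For context, the publish flow above is a plain use of the mqtt package: connect with the credential-derived options, publish each item, then end the connection so QoS 1/2 messages get acked before disconnect. A stripped-down standalone sketch; the broker URL, topic, and payload are placeholders, not node defaults:

// Standalone sketch of the same publish flow (mqtt package assumed installed).
import * as mqtt from 'mqtt';

const client = mqtt.connect('mqtt://localhost', { port: 1883, clientId: 'example-client', clean: true });

client.on('connect', () => {
	// qos and retain mirror the node's "Options" collection.
	client.publish('my/topic', JSON.stringify({ hello: 'world' }), { qos: 1, retain: false });
	// end(false, ...) flushes in-flight messages before closing the connection.
	client.end(false, {}, () => console.log('published and disconnected'));
});

client.on('error', (error) => console.error(error));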
@ -13,14 +13,14 @@ import {
|
|||
import * as mqtt from 'mqtt';
|
||||
|
||||
import {
|
||||
IClientOptions,
|
||||
IClientOptions, ISubscriptionMap,
|
||||
} from 'mqtt';
|
||||
|
||||
export class MqttTrigger implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'MQTT Trigger',
|
||||
name: 'mqttTrigger',
|
||||
icon: 'file:mqtt.png',
|
||||
icon: 'file:mqtt.svg',
|
||||
group: ['trigger'],
|
||||
version: 1,
|
||||
description: 'Listens to MQTT events',
|
||||
|
@ -43,7 +43,9 @@ export class MqttTrigger implements INodeType {
|
|||
type: 'string',
|
||||
default: '',
|
||||
description: `Topics to subscribe to, multiple can be defined with comma.<br/>
|
||||
wildcard characters are supported (+ - for single level and # - for multi level)`,
|
||||
wildcard characters are supported (+ - for single level and # - for multi level)<br>
|
||||
By default all subscriptions use QoS=0. To set a different QoS, write the desired QoS<br>
|
||||
after the topic, preceded by a colon. For example: topicA:1,topicB:2`,
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
|
@ -52,6 +54,13 @@ export class MqttTrigger implements INodeType {
|
|||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'JSON Parse Body',
|
||||
name: 'jsonParseBody',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Try to parse the message to an object.',
|
||||
},
|
||||
{
|
||||
displayName: 'Only Message',
|
||||
name: 'onlyMessage',
|
||||
|
@ -59,13 +68,6 @@ export class MqttTrigger implements INodeType {
|
|||
default: false,
|
||||
description: 'Returns only the message property.',
|
||||
},
|
||||
{
|
||||
displayName: 'JSON Parse Message',
|
||||
name: 'jsonParseMessage',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Try to parse the message to an object.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
@ -81,6 +83,13 @@ export class MqttTrigger implements INodeType {
|
|||
|
||||
const topics = (this.getNodeParameter('topics') as string).split(',');
|
||||
|
||||
const topicsQoS: IDataObject = {};
|
||||
|
||||
for (const data of topics) {
|
||||
const [topic, qos] = data.split(':');
|
||||
topicsQoS[topic] = (qos) ? { qos: parseInt(qos, 10) } : { qos: 0 };
|
||||
}
|
||||
|
||||
const options = this.getNodeParameter('options') as IDataObject;
|
||||
|
||||
if (!topics) {
|
||||
|
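The topic parsing added above maps each comma-separated entry to a per-topic QoS, with entries lacking a suffix falling back to QoS 0. A quick illustrative check using the same split logic:

// Illustrative only; mirrors the parsing in the hunk above.
const topicsParam = 'topicA:1,topicB:2,topicC';
const topicsQoS: { [topic: string]: { qos: number } } = {};
for (const entry of topicsParam.split(',')) {
	const [topic, qos] = entry.split(':');
	topicsQoS[topic] = qos ? { qos: parseInt(qos, 10) } : { qos: 0 };
}
// topicsQoS => { topicA: { qos: 1 }, topicB: { qos: 2 }, topicC: { qos: 0 } }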
@ -91,9 +100,13 @@ export class MqttTrigger implements INodeType {
|
|||
const host = credentials.host as string;
|
||||
const brokerUrl = `${protocol}://${host}`;
|
||||
const port = credentials.port as number || 1883;
|
||||
const clientId = credentials.clientId as string || `mqttjs_${Math.random().toString(16).substr(2, 8)}`;
|
||||
const clean = credentials.clean as boolean;
|
||||
|
||||
const clientOptions: IClientOptions = {
|
||||
port,
|
||||
clean,
|
||||
clientId,
|
||||
};
|
||||
|
||||
if (credentials.username && credentials.password) {
|
||||
|
@ -108,20 +121,19 @@ export class MqttTrigger implements INodeType {
|
|||
async function manualTriggerFunction() {
|
||||
await new Promise((resolve, reject) => {
|
||||
client.on('connect', () => {
|
||||
client.subscribe(topics, (err, granted) => {
|
||||
client.subscribe(topicsQoS as ISubscriptionMap, (err, granted) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
client.on('message', (topic: string, message: Buffer | string) => { // tslint:disable-line:no-any
|
||||
|
||||
let result: IDataObject = {};
|
||||
|
||||
message = message.toString() as string;
|
||||
|
||||
if (options.jsonParseMessage) {
|
||||
if (options.jsonParseBody) {
|
||||
try {
|
||||
message = JSON.parse(message.toString());
|
||||
} catch (error) { }
|
||||
} catch (err) { }
|
||||
}
|
||||
|
||||
result.message = message;
|
||||
|
@ -129,10 +141,9 @@ export class MqttTrigger implements INodeType {
|
|||
|
||||
if (options.onlyMessage) {
|
||||
//@ts-ignore
|
||||
result = message;
|
||||
result = [message as string];
|
||||
}
|
||||
|
||||
self.emit([self.helpers.returnJsonArray([result])]);
|
||||
self.emit([self.helpers.returnJsonArray(result)]);
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
|
@ -144,7 +155,9 @@ export class MqttTrigger implements INodeType {
|
|||
});
|
||||
}
|
||||
|
||||
if (this.getMode() === 'trigger') {
|
||||
manualTriggerFunction();
|
||||
}
|
||||
|
||||
async function closeFunction() {
|
||||
client.end();
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 2.3 KiB |
21
packages/nodes-base/nodes/MQTT/mqtt.svg
Normal file
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.3.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.2" baseProfile="tiny" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px"
|
||||
y="0px" viewBox="0 0 320 320" overflow="visible" xml:space="preserve">
|
||||
<g id="black_bg" display="none">
|
||||
</g>
|
||||
<g id="logos">
|
||||
<g>
|
||||
<path fill="#FFFFFF" d="M7.1,133.9v46.7c73.8,0.1,134,59.3,135,132.4h45.5C186.5,214.6,106.1,134.8,7.1,133.9z"/>
|
||||
<path fill="#FFFFFF" d="M7.1,37.3v46.7c127.4,0.1,231.1,102.5,232.1,228.9h45.5C283.7,161.4,159.7,38.3,7.1,37.3z"/>
|
||||
<path fill="#FFFFFF" d="M312.9,193.5V97.6c-11.8-16.1-25.9-33.4-40.4-47.8c-16-15.9-34.1-30.1-52.3-42.7H119
|
||||
C207.3,38.9,278.1,107.2,312.9,193.5z"/>
|
||||
<path fill="#660066" d="M7.1,180.6v117.1c0,8.4,6.8,15.3,15.3,15.3H142C141,239.8,80.9,180.7,7.1,180.6z"/>
|
||||
<path fill="#660066" d="M7.1,84.1v49.8c99,0.9,179.4,80.7,180.4,179.1h51.7C238.2,186.6,134.5,84.2,7.1,84.1z"/>
|
||||
<path fill="#660066" d="M312.9,297.6V193.5C278.1,107.2,207.3,38.9,119,7.1H22.4c-8.4,0-15.3,6.8-15.3,15.3v15
|
||||
c152.6,0.9,276.6,124,277.6,275.6h13C306.1,312.9,312.9,306.1,312.9,297.6z"/>
|
||||
<path fill="#660066" d="M272.6,49.8c14.5,14.4,28.6,31.7,40.4,47.8V22.4c0-8.4-6.8-15.3-15.3-15.3h-77.3
|
||||
C238.4,19.7,256.6,33.9,272.6,49.8z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
|
@ -3,13 +3,19 @@
|
|||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": [
|
||||
"Utility"
|
||||
"Utility",
|
||||
"Marketing & Content"
|
||||
],
|
||||
"resources": {
|
||||
"credentialDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/credentials/mailcheck"
|
||||
}
|
||||
],
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.mailcheck/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -15,6 +15,13 @@
|
|||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.microsoftOneDrive/"
|
||||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -175,6 +175,13 @@ export const draftMessageSharedFields = [
|
|||
],
|
||||
default: 'Low',
|
||||
},
|
||||
{
|
||||
displayName: 'Is Read',
|
||||
name: 'isRead',
|
||||
description: 'Indicates whether the message has been read.',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
displayName: 'Read Receipt Requested',
|
||||
name: 'isReadReceiptRequested',
|
||||
|
|
|
@ -35,6 +35,11 @@ export const messageOperations = [
|
|||
value: 'getMime',
|
||||
description: 'Get MIME content of a message',
|
||||
},
|
||||
{
|
||||
name: 'Move',
|
||||
value: 'move',
|
||||
description: 'Move a message',
|
||||
},
|
||||
{
|
||||
name: 'Reply',
|
||||
value: 'reply',
|
||||
|
@ -75,6 +80,7 @@ export const messageFields = [
|
|||
'get',
|
||||
'getAttachment',
|
||||
'getMime',
|
||||
'move',
|
||||
'update',
|
||||
'reply',
|
||||
],
|
||||
|
@ -615,7 +621,7 @@ export const messageFields = [
|
|||
{
|
||||
displayName: 'Folder ID',
|
||||
name: 'folderId',
|
||||
description: 'Folder ID',
|
||||
description: 'Target Folder ID.',
|
||||
type: 'string',
|
||||
default: '',
|
||||
required: true,
|
||||
|
|
15
packages/nodes-base/nodes/N8nTrigger.node.json
Normal file
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"node": "n8n-nodes-base.n8nTrigger",
|
||||
"nodeVersion": "1.0",
|
||||
"codexVersion": "1.0",
|
||||
"categories": [
|
||||
"Core Nodes"
|
||||
],
|
||||
"resources": {
|
||||
"primaryDocumentation": [
|
||||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.n8nTrigger/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -413,22 +413,24 @@ export class Orbit implements INodeType {
|
|||
const url = this.getNodeParameter('url', i) as string;
|
||||
const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject;
|
||||
const body: IDataObject = {
|
||||
type: 'post',
|
||||
url,
|
||||
};
|
||||
if (additionalFields.publishedAt) {
|
||||
body.published_at = additionalFields.publishedAt as string;
|
||||
body.occurred_at = additionalFields.publishedAt as string;
|
||||
}
|
||||
|
||||
responseData = await orbitApiRequest.call(this, 'POST', `/${workspaceId}/members/${memberId}/posts`, body);
|
||||
responseData = await orbitApiRequest.call(this, 'POST', `/${workspaceId}/members/${memberId}/activities/`, body);
|
||||
responseData = responseData.data;
|
||||
}
|
||||
if (operation === 'getAll') {
|
||||
const workspaceId = this.getNodeParameter('workspaceId', i) as string;
|
||||
const returnAll = this.getNodeParameter('returnAll', i) as boolean;
|
||||
const filters = this.getNodeParameter('filters', i) as IDataObject;
|
||||
let endpoint = `/${workspaceId}/posts`;
|
||||
let endpoint = `/${workspaceId}/activities`;
|
||||
qs.type = 'content';
|
||||
if (filters.memberId) {
|
||||
endpoint = `/${workspaceId}/members/${filters.memberId}/posts`;
|
||||
endpoint = `/${workspaceId}/members/${filters.memberId}/activities`;
|
||||
}
|
||||
if (returnAll === true) {
|
||||
responseData = await orbitApiRequestAllItems.call(this, 'data', 'GET', endpoint, {}, qs);
|
||||
|
@ -443,7 +445,7 @@ export class Orbit implements INodeType {
|
|||
const memberId = this.getNodeParameter('memberId', i) as string;
|
||||
const postId = this.getNodeParameter('postId', i) as string;
|
||||
|
||||
responseData = await orbitApiRequest.call(this, 'DELETE', `/${workspaceId}/members/${memberId}/posts/${postId}`);
|
||||
responseData = await orbitApiRequest.call(this, 'DELETE', `/${workspaceId}/members/${memberId}/activities/${postId}`);
|
||||
responseData = { success: true };
|
||||
}
|
||||
}
|
||||
|
|
|
@ -114,7 +114,7 @@ export const postFields = [
|
|||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Published At',
|
||||
displayName: 'Occurred At',
|
||||
name: 'publishedAt',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
|
|
|
@ -60,27 +60,40 @@ export async function pgQuery(
|
|||
getNodeParam: Function,
|
||||
pgp: pgPromise.IMain<{}, pg.IClient>,
|
||||
db: pgPromise.IDatabase<{}, pg.IClient>,
|
||||
input: INodeExecutionData[],
|
||||
items: INodeExecutionData[],
|
||||
continueOnFail: boolean,
|
||||
overrideMode?: string,
|
||||
): Promise<IDataObject[]> {
|
||||
const additionalFields = getNodeParam('additionalFields', 0) as IDataObject;
|
||||
|
||||
let valuesArray = [] as string[][];
|
||||
if (additionalFields.queryParams) {
|
||||
const propertiesString = additionalFields.queryParams as string;
|
||||
const properties = propertiesString.split(',').map(column => column.trim());
|
||||
const paramsItems = getItemsCopy(items, properties);
|
||||
valuesArray = paramsItems.map((row) => properties.map(col => row[col])) as string[][];
|
||||
}
|
||||
|
||||
const allQueries = [] as Array<{query: string, values?: string[]}>;
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
const query = getNodeParam('query', i) as string;
|
||||
const values = valuesArray[i];
|
||||
const queryFormat = { query, values };
|
||||
allQueries.push(queryFormat);
|
||||
}
|
||||
|
||||
const mode = overrideMode ? overrideMode : (additionalFields.mode ?? 'multiple') as string;
|
||||
if (mode === 'multiple') {
|
||||
const queries: string[] = [];
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
queries.push(getNodeParam('query', i) as string);
|
||||
}
|
||||
return (await db.multi(pgp.helpers.concat(queries))).flat(1);
|
||||
return (await db.multi(pgp.helpers.concat(allQueries))).flat(1);
|
||||
} else if (mode === 'transaction') {
|
||||
return db.tx(async t => {
|
||||
const result: IDataObject[] = [];
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
for (let i = 0; i < allQueries.length; i++) {
|
||||
try {
|
||||
Array.prototype.push.apply(result, await t.any(getNodeParam('query', i) as string));
|
||||
Array.prototype.push.apply(result, await t.any(allQueries[i].query, allQueries[i].values));
|
||||
} catch (err) {
|
||||
if (continueOnFail === false) throw err;
|
||||
result.push({ ...input[i].json, code: err.code, message: err.message });
|
||||
result.push({ ...items[i].json, code: err.code, message: err.message });
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
@ -89,12 +102,12 @@ export async function pgQuery(
|
|||
} else if (mode === 'independently') {
|
||||
return db.task(async t => {
|
||||
const result: IDataObject[] = [];
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
for (let i = 0; i < allQueries.length; i++) {
|
||||
try {
|
||||
Array.prototype.push.apply(result, await t.any(getNodeParam('query', i) as string));
|
||||
Array.prototype.push.apply(result, await t.any(allQueries[i].query, allQueries[i].values));
|
||||
} catch (err) {
|
||||
if (continueOnFail === false) throw err;
|
||||
result.push({ ...input[i].json, code: err.code, message: err.message });
|
||||
result.push({ ...items[i].json, code: err.code, message: err.message });
|
||||
}
|
||||
}
|
||||
return result;
|
||||
|
|
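The queryParams handling added to pgQuery above builds one positional values array per incoming item, so $1, $2, ... in the query resolve separately for each item. A hedged, self-contained illustration of that mapping, with two assumed input items:

// Illustrative only; reproduces the valuesArray construction from pgQuery above.
const exampleItems: Array<{ json: { [key: string]: number } }> = [
	{ json: { quantity: 10, price: 4.5 } },
	{ json: { quantity: 3, price: 12 } },
];
const properties = 'quantity,price'.split(',').map(column => column.trim());
const valuesArray = exampleItems.map(item => properties.map(col => item.json[col]));
// valuesArray => [[10, 4.5], [3, 12]]
// Row i is passed as $1, $2 for item i's query, e.g.:
// SELECT id, name FROM product WHERE quantity > $1 AND price <= $2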
|
@ -73,9 +73,9 @@ export class Postgres implements INodeType {
|
|||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT id, name FROM product WHERE id < 40',
|
||||
placeholder: 'SELECT id, name FROM product WHERE quantity > $1 AND price <= $2',
|
||||
required: true,
|
||||
description: 'The SQL query to execute.',
|
||||
description: 'The SQL query to execute. You can use n8n expressions or $1 and $2 in conjunction with query parameters.',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -232,6 +232,21 @@ export class Postgres implements INodeType {
|
|||
'See the docs for more examples',
|
||||
].join('<br>'),
|
||||
},
|
||||
{
|
||||
displayName: 'Query Parameters',
|
||||
name: 'queryParams',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': [
|
||||
'executeQuery',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'quantity,price',
|
||||
description: 'Comma separated list of properties which should be used as query parameters.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
|
|
@ -73,9 +73,9 @@ export class QuestDb implements INodeType {
|
|||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT id, name FROM product WHERE id < 40',
|
||||
placeholder: 'SELECT id, name FROM product WHERE quantity > $1 AND price <= $2',
|
||||
required: true,
|
||||
description: 'The SQL query to execute.',
|
||||
description: 'The SQL query to execute. You can use n8n expressions or $1 and $2 in conjunction with query parameters.',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -176,6 +176,21 @@ export class QuestDb implements INodeType {
|
|||
'See the docs for more examples',
|
||||
].join('<br>'),
|
||||
},
|
||||
{
|
||||
displayName: 'Query Parameters',
|
||||
name: 'queryParams',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': [
|
||||
'executeQuery',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'quantity,price',
|
||||
description: 'Comma separated list of properties which should be used as query parameters.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
@ -215,7 +230,7 @@ export class QuestDb implements INodeType {
|
|||
|
||||
const db = pgp(config);
|
||||
|
||||
let returnItems = [];
|
||||
let returnItems: INodeExecutionData[] = [];
|
||||
|
||||
const items = this.getInputData();
|
||||
const operation = this.getNodeParameter('operation', 0) as string;
|
||||
|
|
|
@ -43,6 +43,8 @@ export async function quickbaseApiRequest(this: IExecuteFunctions | ILoadOptions
|
|||
uri: `https://api.quickbase.com/v1${resource}`,
|
||||
json: true,
|
||||
};
|
||||
|
||||
|
||||
if (Object.keys(body).length === 0) {
|
||||
delete options.body;
|
||||
}
|
||||
|
|
|
@ -230,8 +230,6 @@ export class QuickBase implements INodeType {
|
|||
if (operation === 'create') {
|
||||
const tableId = this.getNodeParameter('tableId', 0) as string;
|
||||
|
||||
const { fieldsLabelKey, fieldsIdKey } = await getFieldsObject.call(this, tableId);
|
||||
|
||||
const simple = this.getNodeParameter('simple', 0) as boolean;
|
||||
|
||||
const data: IDataObject[] = [];
|
||||
|
@ -244,12 +242,18 @@ export class QuickBase implements INodeType {
|
|||
const columns = this.getNodeParameter('columns', i) as string;
|
||||
|
||||
const columnList = columns.split(',').map(column => column.trim());
|
||||
|
||||
if (options.useFieldIDs === true) {
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
record[key] = { value: items[i].json[key] };
|
||||
}
|
||||
} else {
|
||||
const { fieldsLabelKey } = await getFieldsObject.call(this, tableId);
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
if (fieldsLabelKey.hasOwnProperty(key) && columnList.includes(key)) {
|
||||
record[fieldsLabelKey[key].toString()] = { value: items[i].json[key] };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data.push(record);
|
||||
}
|
||||
|
@ -259,8 +263,9 @@ export class QuickBase implements INodeType {
|
|||
to: tableId,
|
||||
};
|
||||
|
||||
// If not fields are set return at least the record id
|
||||
body.fieldsToReturn = [fieldsLabelKey['Record ID#']];
|
||||
// If no fields are set return at least the record id
|
||||
// 3 == Default Quickbase RecordID #
|
||||
body.fieldsToReturn = [3];
|
||||
|
||||
if (options.fields) {
|
||||
body.fieldsToReturn = options.fields as string[];
|
||||
|
@ -275,7 +280,7 @@ export class QuickBase implements INodeType {
|
|||
for (const record of records) {
|
||||
const data: IDataObject = {};
|
||||
for (const [key, value] of Object.entries(record)) {
|
||||
data[fieldsIdKey[key]] = (value as IDataObject).value;
|
||||
data[key] = (value as IDataObject).value;
|
||||
}
|
||||
responseData.push(data);
|
||||
}
|
||||
|
@ -380,18 +385,23 @@ export class QuickBase implements INodeType {
|
|||
|
||||
const columnList = columns.split(',').map(column => column.trim());
|
||||
|
||||
if (options.useFieldIDs === true) {
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
record[key] = { value: items[i].json[key] };
|
||||
}
|
||||
} else {
|
||||
const { fieldsLabelKey } = await getFieldsObject.call(this, tableId);
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
if (fieldsLabelKey.hasOwnProperty(key) && columnList.includes(key)) {
|
||||
record[fieldsLabelKey[key].toString()] = { value: items[i].json[key] };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (items[i].json[updateKey] === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), `The update key ${updateKey} could not be found in the input`);
|
||||
}
|
||||
|
||||
record[fieldsLabelKey['Record ID#']] = { value: items[i].json[updateKey] };
|
||||
|
||||
data.push(record);
|
||||
}
|
||||
|
||||
|
@ -400,8 +410,9 @@ export class QuickBase implements INodeType {
|
|||
to: tableId,
|
||||
};
|
||||
|
||||
// If not fields are set return at least the record id
|
||||
body.fieldsToReturn = [fieldsLabelKey['Record ID#']];
|
||||
// If no fields are set return at least the record id
|
||||
// 3 == Default Quickbase RecordID #
|
||||
//body.fieldsToReturn = [fieldsLabelKey['Record ID#']];
|
||||
|
||||
if (options.fields) {
|
||||
body.fieldsToReturn = options.fields as string[];
|
||||
|
@ -432,8 +443,6 @@ export class QuickBase implements INodeType {
|
|||
if (operation === 'upsert') {
|
||||
const tableId = this.getNodeParameter('tableId', 0) as string;
|
||||
|
||||
const { fieldsLabelKey, fieldsIdKey } = await getFieldsObject.call(this, tableId);
|
||||
|
||||
const simple = this.getNodeParameter('simple', 0) as boolean;
|
||||
|
||||
const updateKey = this.getNodeParameter('updateKey', 0) as string;
|
||||
|
@ -451,11 +460,18 @@ export class QuickBase implements INodeType {
|
|||
|
||||
const columnList = columns.split(',').map(column => column.trim());
|
||||
|
||||
if (options.useFieldIDs === true) {
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
record[key] = { value: items[i].json[key] };
|
||||
}
|
||||
} else {
|
||||
const { fieldsLabelKey } = await getFieldsObject.call(this, tableId);
|
||||
for (const key of Object.keys(items[i].json)) {
|
||||
if (fieldsLabelKey.hasOwnProperty(key) && columnList.includes(key)) {
|
||||
record[fieldsLabelKey[key].toString()] = { value: items[i].json[key] };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (items[i].json[updateKey] === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), `The update key ${updateKey} could not be found in the input`);
|
||||
|
@ -472,8 +488,9 @@ export class QuickBase implements INodeType {
|
|||
mergeFieldId,
|
||||
};
|
||||
|
||||
// If not fields are set return at least the record id
|
||||
body.fieldsToReturn = [fieldsLabelKey['Record ID#']];
|
||||
// If no fields are set return at least the record id
|
||||
// 3 == Default Quickbase RecordID #
|
||||
body.fieldsToReturn = [3];
|
||||
|
||||
if (options.fields) {
|
||||
body.fieldsToReturn = options.fields as string[];
|
||||
|
@ -488,7 +505,7 @@ export class QuickBase implements INodeType {
|
|||
for (const record of records) {
|
||||
const data: IDataObject = {};
|
||||
for (const [key, value] of Object.entries(record)) {
|
||||
data[fieldsIdKey[key]] = (value as IDataObject).value;
|
||||
data[key] = (value as IDataObject).value;
|
||||
}
|
||||
responseData.push(data);
|
||||
}
|
||||
|
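The create/update/upsert branches above now either keep raw field IDs (when Use Field IDs is enabled) or translate column labels through fieldsLabelKey. A small illustration of the label-to-ID path, with an assumed fieldsLabelKey mapping:

// Illustrative only; fieldsLabelKey normally comes from getFieldsObject(tableId).
const fieldsLabelKey: { [label: string]: number } = { Name: 6, Quantity: 7 };
const columnList = ['Name', 'Quantity'];
const json: { [key: string]: string | number } = { Name: 'Widget', Quantity: 3, Ignored: 'x' };

const record: { [fieldId: string]: { value: string | number } } = {};
for (const key of Object.keys(json)) {
	if (fieldsLabelKey.hasOwnProperty(key) && columnList.includes(key)) {
		record[fieldsLabelKey[key].toString()] = { value: json[key] };
	}
}
// record => { '6': { value: 'Widget' }, '7': { value: 3 } }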
|
|
@ -84,11 +84,11 @@ export const recordFields = [
|
|||
},
|
||||
default: '',
|
||||
required: true,
|
||||
placeholder: 'id,name,description',
|
||||
placeholder: 'Select Fields...',
|
||||
description: 'Comma separated list of the properties which should be used as columns for the new rows.',
|
||||
},
|
||||
{
|
||||
displayName: 'Simple',
|
||||
displayName: 'Simplified Response',
|
||||
name: 'simple',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
|
@ -122,7 +122,7 @@ export const recordFields = [
|
|||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Fields',
|
||||
displayName: 'Return Fields',
|
||||
name: 'fields',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
|
@ -134,6 +134,13 @@ export const recordFields = [
|
|||
default: [],
|
||||
description: `Specify an array of field ids that will return data for any updates or added record. Record ID (FID 3) is always returned if any field ID is requested.`,
|
||||
},
|
||||
{
|
||||
displayName: 'Use Field IDs',
|
||||
name: 'useFieldIDs',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Use Field IDs instead of Field Names in Columns.',
|
||||
},
|
||||
],
|
||||
},
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
@ -255,54 +262,6 @@ export const recordFields = [
|
|||
},
|
||||
},
|
||||
options: [
|
||||
// {
|
||||
// displayName: 'Group By',
|
||||
// name: 'groupByUi',
|
||||
// placeholder: 'Add Group By',
|
||||
// type: 'fixedCollection',
|
||||
// typeOptions: {
|
||||
// multipleValues: true,
|
||||
// },
|
||||
// default: {},
|
||||
// options: [
|
||||
// {
|
||||
// name: 'groupByValues',
|
||||
// displayName: 'Group By',
|
||||
// values: [
|
||||
// {
|
||||
// displayName: 'Field ID',
|
||||
// name: 'fieldId',
|
||||
// type: 'options',
|
||||
// typeOptions: {
|
||||
// loadOptionsMethod: 'getTableFields',
|
||||
// },
|
||||
// default: '',
|
||||
// description: 'The unique identifier of a field in a table.',
|
||||
// },
|
||||
// {
|
||||
// displayName: 'Grouping',
|
||||
// name: 'grouping',
|
||||
// type: 'options',
|
||||
// options: [
|
||||
// {
|
||||
// name: 'ASC',
|
||||
// value: 'ASC',
|
||||
// },
|
||||
// {
|
||||
// name: 'DESC',
|
||||
// value: 'DESC',
|
||||
// },
|
||||
// {
|
||||
// name: 'Equal Values',
|
||||
// value: 'equal-values',
|
||||
// },
|
||||
// ],
|
||||
// default: 'ASC',
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
{
|
||||
displayName: 'Select',
|
||||
name: 'select',
|
||||
|
@ -472,6 +431,13 @@ export const recordFields = [
|
|||
default: [],
|
||||
description: `Specify an array of field ids that will return data for any updates or added record. Record ID (FID 3) is always returned if any field ID is requested.`,
|
||||
},
|
||||
{
|
||||
displayName: 'Use Field IDs',
|
||||
name: 'useFieldIDs',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Use Field IDs instead of Field Names in Columns.',
|
||||
},
|
||||
// {
|
||||
// displayName: 'Merge Field ID',
|
||||
// name: 'mergeFieldId',
|
||||
|
@ -612,6 +578,13 @@ export const recordFields = [
|
|||
default: [],
|
||||
description: `Specify an array of field ids that will return data for any updates or added record. Record ID (FID 3) is always returned if any field ID is requested.`,
|
||||
},
|
||||
{
|
||||
displayName: 'Use Field IDs',
|
||||
name: 'useFieldIDs',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Use Field IDs instead of Field Names in Columns.',
|
||||
},
|
||||
],
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
|
|
@ -8,10 +8,7 @@ import {
|
|||
|
||||
import {
|
||||
readFile as fsReadFile,
|
||||
} from 'fs';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const fsReadFileAsync = promisify(fsReadFile);
|
||||
} from 'fs/promises';
|
||||
|
||||
|
||||
export class ReadBinaryFile implements INodeType {
|
||||
|
@ -64,7 +61,7 @@ export class ReadBinaryFile implements INodeType {
|
|||
|
||||
let data;
|
||||
try {
|
||||
data = await fsReadFileAsync(filePath) as Buffer;
|
||||
data = await fsReadFile(filePath) as Buffer;
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
throw new NodeOperationError(this.getNode(), `The file "${filePath}" could not be found.`);
|
||||
|
|
|
@ -9,10 +9,7 @@ import * as path from 'path';
|
|||
|
||||
import {
|
||||
readFile as fsReadFile,
|
||||
} from 'fs';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const fsReadFileAsync = promisify(fsReadFile);
|
||||
} from 'fs/promises';
|
||||
|
||||
|
||||
export class ReadBinaryFiles implements INodeType {
|
||||
|
@ -61,7 +58,7 @@ export class ReadBinaryFiles implements INodeType {
|
|||
let item: INodeExecutionData;
|
||||
let data: Buffer;
|
||||
for (const filePath of files) {
|
||||
data = await fsReadFileAsync(filePath) as Buffer;
|
||||
data = await fsReadFile(filePath) as Buffer;
|
||||
|
||||
item = {
|
||||
binary: {
|
||||
|
|
|
@ -451,7 +451,7 @@ export class Redis implements INodeType {
|
|||
});
|
||||
|
||||
client.on('ready', async (err: Error | null) => {
|
||||
|
||||
try {
|
||||
if (operation === 'info') {
|
||||
const clientInfo = util.promisify(client.info).bind(client);
|
||||
const result = await clientInfo();
|
||||
|
@ -523,6 +523,9 @@ export class Redis implements INodeType {
|
|||
client.quit();
|
||||
resolve(this.prepareOutputData(returnItems));
|
||||
}
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
@ -18,6 +18,10 @@ import * as moment from 'moment-timezone';
|
|||
|
||||
import * as jwt from 'jsonwebtoken';
|
||||
|
||||
import {
|
||||
LoggerProxy as Logger
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export async function salesforceApiRequest(this: IExecuteFunctions | IExecuteSingleFunctions | ILoadOptionsFunctions, method: string, endpoint: string, body: any = {}, qs: IDataObject = {}, uri?: string, option: IDataObject = {}): Promise<any> { // tslint:disable-line:no-any
|
||||
const authenticationMethod = this.getNodeParameter('authentication', 0, 'oAuth2') as string;
|
||||
|
||||
|
@ -29,6 +33,7 @@ export async function salesforceApiRequest(this: IExecuteFunctions | IExecuteSin
|
|||
const response = await getAccessToken.call(this, credentials as IDataObject);
|
||||
const { instance_url, access_token } = response;
|
||||
const options = getOptions.call(this, method, (uri || endpoint), body, qs, instance_url as string);
|
||||
Logger.debug(`Authentication for "Salesforce" node is using "jwt". Invoking URI ${options.uri}`);
|
||||
options.headers!.Authorization = `Bearer ${access_token}`;
|
||||
//@ts-ignore
|
||||
return await this.helpers.request(options);
|
||||
|
@ -38,6 +43,7 @@ export async function salesforceApiRequest(this: IExecuteFunctions | IExecuteSin
|
|||
const credentials = await this.getCredentials(credentialsType);
|
||||
const subdomain = ((credentials!.accessTokenUrl as string).match(/https:\/\/(.+).salesforce\.com/) || [])[1];
|
||||
const options = getOptions.call(this, method, (uri || endpoint), body, qs, `https://${subdomain}.salesforce.com`);
|
||||
Logger.debug(`Authentication for "Salesforce" node is using "OAuth2". Invoking URI ${options.uri}`);
|
||||
//@ts-ignore
|
||||
return await this.helpers.requestOAuth2.call(this, credentialsType, options);
|
||||
}
|
||||
|
|
|
@ -112,6 +112,10 @@ import {
|
|||
userOperations,
|
||||
} from './UserDescription';
|
||||
|
||||
import {
|
||||
LoggerProxy as Logger,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class Salesforce implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Salesforce',
|
||||
|
@ -923,6 +927,8 @@ export class Salesforce implements INodeType {
|
|||
const resource = this.getNodeParameter('resource', 0) as string;
|
||||
const operation = this.getNodeParameter('operation', 0) as string;
|
||||
|
||||
Logger.debug(`Running "Salesforce" node named "${this.getNode.name}" resource "${resource}" operation "${operation}"`);
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
if (resource === 'lead') {
|
||||
//https://developer.salesforce.com/docs/api-explorer/sobject/Lead/post-lead
|
||||
|
|
|
@ -11,7 +11,6 @@ import {
|
|||
} from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
getItemCopy,
|
||||
pgInsert,
|
||||
pgQuery,
|
||||
pgUpdate,
|
||||
|
@ -77,15 +76,13 @@ export class TimescaleDb implements INodeType {
|
|||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'executeQuery',
|
||||
],
|
||||
operation: ['executeQuery'],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT id, name FROM product WHERE id < 40',
|
||||
placeholder: 'SELECT id, name FROM product WHERE quantity > $1 AND price <= $2',
|
||||
required: true,
|
||||
description: 'The SQL query to execute.',
|
||||
description: 'The SQL query to execute. You can use n8n expressions or $1 and $2 in conjunction with query parameters.',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
|
@ -256,6 +253,21 @@ export class TimescaleDb implements INodeType {
|
|||
'See the docs for more examples',
|
||||
].join('<br>'),
|
||||
},
|
||||
{
|
||||
displayName: 'Query Parameters',
|
||||
name: 'queryParams',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operation': [
|
||||
'executeQuery',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'quantity,price',
|
||||
description: 'Comma separated list of properties which should be used as query parameters.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
|
|
@ -17,6 +17,11 @@
|
|||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
},
|
||||
{
|
||||
"label": "How a digital strategist uses n8n for online marketing",
|
||||
"icon": "💻",
|
||||
|
|
|
@ -15,6 +15,13 @@
|
|||
{
|
||||
"url": "https://docs.n8n.io/nodes/n8n-nodes-base.trelloTrigger/"
|
||||
}
|
||||
],
|
||||
"generic": [
|
||||
{
|
||||
"label": "Hey founders! Your business doesn't need you to operate",
|
||||
"icon": " 🖥️",
|
||||
"url": "https://n8n.io/blog/your-business-doesnt-need-you-to-operate/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -15,11 +15,21 @@ export const channelOperations = [
|
|||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
name: 'Archive',
|
||||
value: 'archive',
|
||||
description: 'Archive a channel',
|
||||
},
|
||||
{
|
||||
name: 'Create',
|
||||
value: 'create',
|
||||
description: 'Initiates a public or private channel-based conversation',
|
||||
},
|
||||
{
|
||||
name: 'Delete',
|
||||
value: 'delete',
|
||||
description: 'Delete a channel',
|
||||
},
|
||||
{
|
||||
name: 'Get',
|
||||
value: 'get',
|
||||
|
@ -30,6 +40,11 @@ export const channelOperations = [
|
|||
value: 'getAll',
|
||||
description: 'Get all channels',
|
||||
},
|
||||
{
|
||||
name: 'Unarchive',
|
||||
value: 'unarchive',
|
||||
description: 'Unarchive a channel',
|
||||
},
|
||||
{
|
||||
name: 'Update',
|
||||
value: 'update',
|
||||
|
@ -64,7 +79,7 @@ export const channelFields = [
|
|||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The id of the workspace.',
|
||||
description: 'The ID of the workspace.',
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
|
@ -156,28 +171,28 @@ export const channelFields = [
|
|||
},
|
||||
],
|
||||
default: 0,
|
||||
description: 'The color of the channel',
|
||||
description: 'The color of the channel.',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The description of the channel',
|
||||
description: 'The description of the channel.',
|
||||
},
|
||||
{
|
||||
displayName: 'Public',
|
||||
name: 'public',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'If enabled, the channel will be marked as public',
|
||||
description: 'If enabled, the channel will be marked as public.',
|
||||
},
|
||||
{
|
||||
displayName: 'Temp ID',
|
||||
name: 'temp_id',
|
||||
type: 'number',
|
||||
default: -1,
|
||||
description: 'The temporary id of the channel. It needs to be a negative number.',
|
||||
description: 'The temporary ID of the channel. It needs to be a negative number.',
|
||||
},
|
||||
{
|
||||
displayName: 'User IDs',
|
||||
|
@ -194,8 +209,9 @@ export const channelFields = [
|
|||
},
|
||||
],
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* channel:get */
|
||||
/* channel:get/archive/unarchive/delete */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Channel ID',
|
||||
|
@ -205,7 +221,10 @@ export const channelFields = [
|
|||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'archive',
|
||||
'delete',
|
||||
'get',
|
||||
'unarchive',
|
||||
],
|
||||
resource: [
|
||||
'channel',
|
||||
|
@ -213,8 +232,9 @@ export const channelFields = [
|
|||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the channel',
|
||||
description: 'The ID of the channel.',
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* channel:getAll */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
@ -302,7 +322,7 @@ export const channelFields = [
|
|||
name: 'archived',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'If enabled, only archived conversations are returned',
|
||||
description: 'If enabled, only archived conversations are returned.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -400,28 +420,28 @@ export const channelFields = [
|
|||
},
|
||||
],
|
||||
default: 0,
|
||||
description: 'The color of the channel',
|
||||
description: 'The color of the channel.',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The description of the channel',
|
||||
description: 'The description of the channel.',
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The name of the channel',
|
||||
description: 'The name of the channel.',
|
||||
},
|
||||
{
|
||||
displayName: 'Public',
|
||||
name: 'public',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'If enabled, the channel will be marked as public',
|
||||
description: 'If enabled, the channel will be marked as public.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
561
packages/nodes-base/nodes/Twist/CommentDescription.ts
Normal file
|
@ -0,0 +1,561 @@
|
|||
import {
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export const commentOperations = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
name: 'Create',
|
||||
value: 'create',
|
||||
description: 'Create a new comment to a thread',
|
||||
},
|
||||
{
|
||||
name: 'Delete',
|
||||
value: 'delete',
|
||||
description: 'Delete a comment',
|
||||
},
|
||||
{
|
||||
name: 'Get',
|
||||
value: 'get',
|
||||
description: 'Get information about a comment',
|
||||
},
|
||||
{
|
||||
name: 'Get All',
|
||||
value: 'getAll',
|
||||
description: 'Get all comments',
|
||||
},
|
||||
{
|
||||
name: 'Update',
|
||||
value: 'update',
|
||||
description: 'Update a comment',
|
||||
},
|
||||
],
|
||||
default: 'create',
|
||||
description: 'The operation to perform.',
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
||||
export const commentFields = [
|
||||
/*-------------------------------------------------------------------------- */
|
||||
/* comment:create */
|
||||
/* ------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Thread ID',
|
||||
name: 'threadId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the thread.',
|
||||
},
|
||||
{
|
||||
displayName: 'Content',
|
||||
name: 'content',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The content of the comment.',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
name: 'additionalFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
operation: [
|
||||
'create',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Actions',
|
||||
name: 'actionsUi',
|
||||
type: 'fixedCollection',
|
||||
placeholder: 'Add Action',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'actionValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'action',
|
||||
type: 'options',
|
||||
description: 'The action of the button.',
|
||||
options: [
|
||||
{
|
||||
name: 'Open URL',
|
||||
value: 'open_url',
|
||||
},
|
||||
{
|
||||
name: 'Prefill Message',
|
||||
value: 'prefill_message',
|
||||
},
|
||||
{
|
||||
name: 'Send Reply',
|
||||
value: 'send_reply',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Button Text',
|
||||
name: 'button_text',
|
||||
type: 'string',
|
||||
description: 'The text for the action button.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Message',
|
||||
name: 'message',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'send_reply',
|
||||
'prefill_message',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'The text of the message.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of the button. (Currently only <code>action</code> is available).',
|
||||
options: [
|
||||
{
|
||||
name: 'Action',
|
||||
value: 'action',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'open_url',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'URL to redirect.',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Attachments',
|
||||
name: 'binaryProperties',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Name of the property that holds the binary data. Multiple properties can be defined, separated by commas.',
|
||||
},
|
||||
{
|
||||
displayName: 'Direct Mentions',
|
||||
name: 'direct_mentions',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getUsers',
|
||||
loadOptionsDependsOn: [
|
||||
'workspaceId',
|
||||
],
|
||||
},
|
||||
default: [],
|
||||
description: 'The users that are directly mentioned.',
|
||||
},
|
||||
{
|
||||
displayName: 'Mark thread position',
|
||||
name: 'mark_thread_position',
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description: 'If enabled, the position of the thread is marked. Enabled by default.',
|
||||
},
|
||||
{
|
||||
displayName: 'Recipients',
|
||||
name: 'recipients',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getUsers',
|
||||
loadOptionsDependsOn: [
|
||||
'workspaceId',
|
||||
],
|
||||
},
|
||||
default: [],
|
||||
description: 'The users that will be attached to the comment.',
|
||||
},
|
||||
{
|
||||
displayName: 'Temporary ID',
|
||||
name: 'temp_id',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description: 'The temporary ID of the comment.',
|
||||
},
|
||||
{
|
||||
displayName: 'Send as integration',
|
||||
name: 'send_as_integration',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Displays the integration as the comment creator.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* comment:get/delete */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Comment ID',
|
||||
name: 'commentId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'get',
|
||||
'delete',
|
||||
],
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the comment.',
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* comment:getAll */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Thread ID',
|
||||
name: 'threadId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the thread.',
|
||||
},
|
||||
{
|
||||
displayName: 'Return All',
|
||||
name: 'returnAll',
|
||||
type: 'boolean',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: false,
|
||||
description: 'If all results should be returned or only up to a given limit.',
|
||||
},
|
||||
{
|
||||
displayName: 'Limit',
|
||||
name: 'limit',
|
||||
type: 'number',
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
returnAll: [
|
||||
false,
|
||||
],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
minValue: 1,
|
||||
maxValue: 100,
|
||||
},
|
||||
default: 50,
|
||||
description: 'How many results to return.',
|
||||
},
|
||||
{
|
||||
displayName: 'Filters',
|
||||
name: 'filters',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'As IDs',
|
||||
name: 'as_ids',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'If enabled, only the IDs of the comments are returned.',
|
||||
},
|
||||
{
|
||||
displayName: 'Ending Object Index',
|
||||
name: 'to_obj_index',
|
||||
type: 'number',
|
||||
default: 50,
|
||||
description: 'Limit comments ending at the specified object index.',
|
||||
},
|
||||
{
|
||||
displayName: 'Newer Than',
|
||||
name: 'newer_than_ts',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
description: 'Limits comments to those newer than the specified Unix time.',
|
||||
},
|
||||
{
|
||||
displayName: 'Older Than',
|
||||
name: 'older_than_ts',
|
||||
type: 'dateTime',
|
||||
default: '',
|
||||
description: 'Limits comments to those older than the specified Unix time.',
|
||||
},
|
||||
{
|
||||
displayName: 'Order By',
|
||||
name: 'order_by',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'ASC',
|
||||
value: 'ASC',
|
||||
},
|
||||
{
|
||||
name: 'DESC',
|
||||
value: 'DESC',
|
||||
},
|
||||
],
|
||||
default: 'ASC',
|
||||
description: 'The order of the comments returned - one of DESC or ASC.',
|
||||
},
|
||||
{
|
||||
displayName: 'Starting Object Index',
|
||||
name: 'from_obj_index',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description: 'Limit comments starting at the specified object index.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* comment:update */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Comment ID',
|
||||
name: 'commentId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the comment.',
|
||||
},
|
||||
{
|
||||
displayName: 'Update Fields',
|
||||
name: 'updateFields',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Field',
|
||||
default: {},
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: [
|
||||
'comment',
|
||||
],
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Actions',
|
||||
name: 'actionsUi',
|
||||
type: 'fixedCollection',
|
||||
placeholder: 'Add Action',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'actionValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'action',
|
||||
type: 'options',
|
||||
description: 'The action of the button.',
|
||||
options: [
|
||||
{
|
||||
name: 'Open URL',
|
||||
value: 'open_url',
|
||||
},
|
||||
{
|
||||
name: 'Prefill Message',
|
||||
value: 'prefill_message',
|
||||
},
|
||||
{
|
||||
name: 'Send Reply',
|
||||
value: 'send_reply',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Button Text',
|
||||
name: 'button_text',
|
||||
type: 'string',
|
||||
description: 'The text for the action button.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Message',
|
||||
name: 'message',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'send_reply',
|
||||
'prefill_message',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'The text of the message.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of the button. (Currently only <code>action</code> is available).',
|
||||
options: [
|
||||
{
|
||||
name: 'Action',
|
||||
value: 'action',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'open_url',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'URL to redirect.',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Attachments',
|
||||
name: 'binaryProperties',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Name of the property that holds the binary data. Multiple properties can be defined, separated by commas.',
|
||||
},
|
||||
{
|
||||
displayName: 'Content',
|
||||
name: 'content',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The content of the comment.',
|
||||
},
|
||||
{
|
||||
displayName: 'Direct Mentions',
|
||||
name: 'direct_mentions',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getUsers',
|
||||
loadOptionsDependsOn: [
|
||||
'workspaceId',
|
||||
],
|
||||
},
|
||||
default: [],
|
||||
description: 'The users that are directly mentioned.',
|
||||
},
|
||||
],
|
||||
},
|
||||
] as INodeProperties[];
|
|
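CommentDescription.ts above leans on displayOptions.show throughout to decide which fields the editor presents. A rough illustration of the rule, not n8n's actual resolver: a field is displayed only while every parameter listed under show currently holds one of the allowed values.

// Simplified model of displayOptions.show, for illustration only.
type ShowCondition = Record<string, Array<string | number | boolean>>;

function isDisplayed(show: ShowCondition, current: Record<string, string | number | boolean>): boolean {
	return Object.entries(show).every(([name, allowed]) => allowed.includes(current[name]));
}

// Example: { resource: ['comment'], operation: ['getAll'] } matches only while the
// "comment" resource and the "Get All" operation are selected, so the Return All,
// Limit and Filters fields defined above appear for that combination alone.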
@ -20,6 +20,26 @@ export const messageConversationOperations = [
|
|||
value: 'create',
|
||||
description: 'Create a message in a conversation',
|
||||
},
|
||||
{
|
||||
name: 'Delete',
|
||||
value: 'delete',
|
||||
description: 'Delete a message in a conversation',
|
||||
},
|
||||
{
|
||||
name: 'Get',
|
||||
value: 'get',
|
||||
description: 'Get a message in a conversation',
|
||||
},
|
||||
{
|
||||
name: 'Get All',
|
||||
value: 'getAll',
|
||||
description: 'Get all messages in a conversation',
|
||||
},
|
||||
{
|
||||
name: 'Update',
|
||||
value: 'update',
|
||||
description: 'Update a message in a conversation',
|
||||
},
|
||||
],
|
||||
default: 'create',
|
||||
description: 'The operation to perform.',
|
||||
|
@ -91,7 +111,7 @@ export const messageConversationFields = [
|
|||
],
|
||||
},
|
||||
},
|
||||
description: `The content of the new message. Mentions can be used as [Name](twist-mention://user_id) for users or [Group name](twist-group-mention://group_id) for groups.`,
|
||||
description: 'The content of the new message. Mentions can be used as <code>[Name](twist-mention://user_id)</code> for users or <code>[Group name](twist-group-mention://group_id)</code> for groups.',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
|
@ -108,7 +128,7 @@ export const messageConversationFields = [
|
|||
},
|
||||
},
|
||||
default: {},
|
||||
description: 'Other options to set',
|
||||
description: 'Other options to set.',
|
||||
placeholder: 'Add options',
|
||||
options: [
|
||||
{
|
||||
|
@ -128,7 +148,7 @@ export const messageConversationFields = [
|
|||
displayName: 'Action',
|
||||
name: 'action',
|
||||
type: 'options',
|
||||
description: 'The action of the button',
|
||||
description: 'The action of the button.',
|
||||
options: [
|
||||
{
|
||||
name: 'Open URL',
|
||||
|
@ -171,7 +191,7 @@ export const messageConversationFields = [
|
|||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of the button, for now just action is available.',
|
||||
description: 'The type of the button. (Currently only <code>action</code> is available).',
|
||||
options: [
|
||||
{
|
||||
name: 'Action',
|
||||
|
@ -191,7 +211,7 @@ export const messageConversationFields = [
|
|||
],
|
||||
},
|
||||
},
|
||||
description: 'URL to redirect',
|
||||
description: 'URL to redirect.',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
|
@ -213,7 +233,7 @@ export const messageConversationFields = [
|
|||
loadOptionsMethod: 'getUsers',
|
||||
},
|
||||
default: [],
|
||||
description: `The users that are directly mentioned`,
|
||||
description: 'The users that are directly mentioned.',
|
||||
},
|
||||
// {
|
||||
// displayName: 'Direct Group Mentions ',
|
||||
|
@ -223,8 +243,289 @@ export const messageConversationFields = [
|
|||
// loadOptionsMethod: 'getGroups',
|
||||
// },
|
||||
// default: [],
|
||||
// description: `The groups that are directly mentioned`,
|
||||
// description: 'The groups that are directly mentioned.',
|
||||
// },
|
||||
],
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* messageConversation:getAll */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Workspace ID',
|
||||
name: 'workspaceId',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getWorkspaces',
|
||||
},
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the workspace.',
|
||||
},
|
||||
{
|
||||
displayName: 'Conversation ID',
|
||||
name: 'conversationId',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getConversations',
|
||||
loadOptionsDependsOn: [
|
||||
'workspaceId',
|
||||
],
|
||||
},
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the conversation.',
|
||||
},
|
||||
{
|
||||
displayName: 'Additional Fields',
|
||||
name: 'additionalFields',
|
||||
type: 'collection',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'getAll',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: {},
|
||||
description: 'Other options to set.',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Ending Object Index',
|
||||
name: 'to_obj_index',
|
||||
type: 'number',
|
||||
default: 50,
|
||||
description: 'Limit messages ending at the specified object index.',
|
||||
},
|
||||
{
|
||||
displayName: 'Limit',
|
||||
name: 'limit',
|
||||
type: 'number',
|
||||
default: 50,
|
||||
description: 'Limits the number of messages returned.',
|
||||
},
|
||||
{
|
||||
displayName: 'Order By',
|
||||
name: 'order_by',
|
||||
type: 'options',
|
||||
default: 'ASC',
|
||||
description: 'The order of the messages returned - one of DESC or ASC.',
|
||||
options: [
|
||||
{
|
||||
name: 'ASC',
|
||||
value: 'ASC',
|
||||
},
|
||||
{
|
||||
name: 'DESC',
|
||||
value: 'DESC',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Starting Object Index',
|
||||
name: 'from_obj_index',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description: 'Limit messages starting at the specified object index.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* messageConversation:get/delete/update */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Message ID',
|
||||
name: 'id',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'delete',
|
||||
'get',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the conversation message.',
|
||||
},
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
/* messageConversation:update */
|
||||
/* -------------------------------------------------------------------------- */
|
||||
{
|
||||
displayName: 'Conversation Message ID',
|
||||
name: 'id',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
description: 'The ID of the conversation message.',
|
||||
},
|
||||
{
|
||||
displayName: 'Update Fields',
|
||||
name: 'updateFields',
|
||||
type: 'collection',
|
||||
displayOptions: {
|
||||
show: {
|
||||
operation: [
|
||||
'update',
|
||||
],
|
||||
resource: [
|
||||
'messageConversation',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: {},
|
||||
description: 'Other options to set.',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Actions',
|
||||
name: 'actionsUi',
|
||||
type: 'fixedCollection',
|
||||
placeholder: 'Add Action',
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'actionValues',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Action',
|
||||
name: 'action',
|
||||
type: 'options',
|
||||
description: 'The action of the button.',
|
||||
options: [
|
||||
{
|
||||
name: 'Open URL',
|
||||
value: 'open_url',
|
||||
},
|
||||
{
|
||||
name: 'Prefill Message',
|
||||
value: 'prefill_message',
|
||||
},
|
||||
{
|
||||
name: 'Send Reply',
|
||||
value: 'send_reply',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Button Text',
|
||||
name: 'button_text',
|
||||
type: 'string',
|
||||
description: 'The text for the action button.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Message',
|
||||
name: 'message',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'send_reply',
|
||||
'prefill_message',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'The text of the message.',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of the button. (Currently only <code>action</code> is available).',
|
||||
options: [
|
||||
{
|
||||
name: 'Action',
|
||||
value: 'action',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
action: [
|
||||
'open_url',
|
||||
],
|
||||
},
|
||||
},
|
||||
description: 'URL to redirect.',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Attachments',
|
||||
name: 'binaryProperties',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description: 'Name of the property that holds the binary data. Multiple properties can be defined, separated by commas.',
|
||||
},
|
||||
{
|
||||
displayName: 'Content',
|
||||
name: 'content',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'The content of the new message. Mentions can be used as <code>[Name](twist-mention://user_id)</code> for users or <code>[Group name](twist-group-mention://group_id)</code> for groups.',
|
||||
},
|
||||
{
|
||||
displayName: 'Direct Mentions',
|
||||
name: 'direct_mentions',
|
||||
type: 'multiOptions',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getUsers',
|
||||
},
|
||||
default: [],
|
||||
description: 'The users that are directly mentioned.',
|
||||
},
|
||||
],
|
||||
},
|
||||
] as INodeProperties[];
|
||||
|
|
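Several fields above (Workspace ID, Conversation ID, Direct Mentions) combine loadOptionsMethod with loadOptionsDependsOn, so their dropdowns are rebuilt whenever the parameter they depend on changes. A hedged sketch of such a loader, with a hypothetical twistApiRequest helper and an assumed endpoint path rather than the node's real request utilities:

// Hypothetical loader: turn the conversations of the selected workspace into
// { name, value } pairs suitable for an options dropdown.
async function getConversationOptions(
	twistApiRequest: (endpoint: string, qs: Record<string, string>) => Promise<Array<{ id: number; title: string }>>,
	workspaceId: string,
): Promise<Array<{ name: string; value: number }>> {
	const conversations = await twistApiRequest('/conversations/get', { workspace_id: workspaceId });
	return conversations.map(conversation => ({ name: conversation.title, value: conversation.id }));
}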
Some files were not shown because too many files have changed in this diff.