Merge branch 'master' of https://github.com/n8n-io/n8n
This commit is contained in:
commit 8c7450218a
BIN packages/cli/databases/sqlite/database.sqlite Normal file
Binary file not shown.
108 packages/cli/migrations/ormconfig.ts Normal file
@@ -0,0 +1,108 @@
import {MongoDb, SQLite, MySQLDb, PostgresDb} from '../src/databases/index';

module.exports = [
	{
		"name": "sqlite",
		"type": "sqlite",
		"logging": true,
		"entities": Object.values(SQLite),
		"database": "./packages/cli/database.sqlite",
		"migrations": [
			"./src/databases/sqlite/migrations/*.ts"
		],
		"subscribers": [
			"./src/databases/sqlite/subscribers/*.ts"
		],
		"cli": {
			"entitiesDir": "./src/databases/sqlite",
			"migrationsDir": "./src/databases/sqlite/migrations",
			"subscribersDir": "./src/databases/sqlite/subscribers"
		}
	},
	{
		"name": "mongodb",
		"type": "mongodb",
		"logging": false,
		"entities": Object.values(MongoDb),
		"url": "mongodb://root:example@localhost:27017/n8n",
		"authSource": 'admin',
		"migrations": [
			"./src/databases/mongodb/migrations/*.ts"
		],
		"subscribers": [
			"src/subscriber/**/*.ts"
		],
		"cli": {
			"entitiesDir": "./src/databases/mongodb",
			"migrationsDir": "./src/databases/mongodb/Migrations",
			"subscribersDir": "./src/databases/mongodb/Subscribers"
		}
	},
	{
		"name": "postgres",
		"type": "postgres",
		"logging": false,
		"host": "localhost",
		"username": "postgres",
		"password": "docker",
		"port": 5432,
		"database": "postgres",
		"schema": "public",
		"entities": Object.values(PostgresDb),
		"migrations": [
			"./src/databases/postgresdb/migrations/*.ts"
		],
		"subscribers": [
			"src/subscriber/**/*.ts"
		],
		"cli": {
			"entitiesDir": "./src/databases/postgresdb",
			"migrationsDir": "./src/databases/postgresdb/migrations",
			"subscribersDir": "./src/databases/postgresdb/subscribers"
		}
	},
	{
		"name": "mysql",
		"type": "mysql",
		"database": "n8n",
		"username": "root",
		"password": "password",
		"host": "localhost",
		"port": "3308",
		"logging": false,
		"entities": Object.values(MySQLDb),
		"migrations": [
			"./src/databases/mysqldb/migrations/*.ts"
		],
		"subscribers": [
			"src/subscriber/**/*.ts"
		],
		"cli": {
			"entitiesDir": "./src/databases/mysqldb",
			"migrationsDir": "./src/databases/mysqldb/migrations",
			"subscribersDir": "./src/databases/mysqldb/Subscribers"
		}
	},
	{
		"name": "mariadb",
		"type": "mariadb",
		"database": "n8n",
		"username": "root",
		"password": "password",
		"host": "localhost",
		"port": "3308",
		"logging": false,
		"entities": Object.values(MySQLDb),
		"migrations": [
			"./src/databases/mysqldb/migrations/*.ts"
		],
		"subscribers": [
			"src/subscriber/**/*.ts"
		],
		"cli": {
			"entitiesDir": "./src/databases/mysqldb",
			"migrationsDir": "./src/databases/mysqldb/migrations",
			"subscribersDir": "./src/databases/mysqldb/Subscribers"
		}
	},
];

@@ -28,7 +28,8 @@
"start:windows": "cd bin && n8n",
"test": "jest",
"tslint": "tslint -p tsconfig.json -c tslint.json",
"watch": "tsc --watch"
"watch": "tsc --watch",
"typeorm": "ts-node ./node_modules/typeorm/cli.js"
},
"bin": {
"n8n": "./bin/n8n"
@@ -70,8 +71,9 @@
"p-cancelable": "^2.0.0",
"run-script-os": "^1.0.7",
"ts-jest": "^24.0.2",
"tslint": "^5.17.0",
"typescript": "~3.7.4"
"tslint": "^6.1.2",
"typescript": "~3.7.4",
"ts-node": "^8.9.1"
},
"dependencies": {
"@oclif/command": "^1.5.18",
@@ -102,9 +104,9 @@
"open": "^7.0.0",
"pg": "^7.11.0",
"request-promise-native": "^1.0.7",
"sqlite3": "^4.0.6",
"sqlite3": "^4.2.0",
"sse-channel": "^3.1.1",
"typeorm": "^0.2.16"
"typeorm": "^0.2.24"
},
"jest": {
"transform": {

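The new ormconfig.ts defines one named TypeORM connection per supported database, and the "typeorm" script plus the ts-node dev dependency added above are what let the CLI load it. As a rough sketch of how such a named entry could be consumed programmatically with TypeORM 0.2.x — the helper name and the assumption that the file is picked up as the project's ormconfig are illustrative, not part of this commit:

import { createConnection, getConnectionOptions } from 'typeorm';

// Hypothetical helper: load the connection entry whose "name" matches
// (e.g. 'sqlite' or 'postgres') and run its pending migrations.
async function runPendingMigrations(connectionName: string): Promise<void> {
	const options = await getConnectionOptions(connectionName);
	const connection = await createConnection(options);
	try {
		await connection.runMigrations();
	} finally {
		await connection.close();
	}
}

runPendingMigrations('sqlite').catch(console.error);

The same thing is possible from the command line through the new script (for example "npm run typeorm -- migration:run"), although pointing the CLI at migrations/ormconfig.ts and at a specific connection may need additional flags that this diff does not show.
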
0 packages/cli/packages/cli/database.sqlite Normal file
@@ -315,6 +315,7 @@ export class ActiveWorkflowRunner {
return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode);
returnFunctions.emit = (data: INodeExecutionData[][]): void => {
WorkflowHelpers.saveStaticData(workflow);
this.runWorkflow(workflowData, node, data, additionalData, mode);
};
return returnFunctions;

@@ -1,7 +1,7 @@
import {
DatabaseType,
GenericHelpers,
IDatabaseCollections,
DatabaseType,
} from './';

import {
@@ -16,9 +16,9 @@ import {

import {
MongoDb,
MySQLDb,
PostgresDb,
SQLite,
MySQLDb,
} from './databases';

export let collections: IDatabaseCollections = {
@@ -27,16 +27,27 @@ export let collections: IDatabaseCollections = {
Workflow: null,
};

import {
InitialMigration1587669153312
} from './databases/postgresdb/migrations';

import {
InitialMigration1588157391238
} from './databases/mysqldb/migrations';

import {
InitialMigration1588102412422
} from './databases/sqlite/migrations';

import * as path from 'path';

export async function init(synchronize?: boolean): Promise<IDatabaseCollections> {
export async function init(): Promise<IDatabaseCollections> {
const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType;
const n8nFolder = UserSettings.getUserN8nFolderPath();

let entities;
let connectionOptions: ConnectionOptions;

let dbNotExistError: string | undefined;
switch (dbType) {
case 'mongodb':
entities = MongoDb;
@@ -49,7 +60,6 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
break;

case 'postgresdb':
dbNotExistError = 'does not exist';
entities = PostgresDb;
connectionOptions = {
type: 'postgres',
@@ -60,12 +70,13 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number,
username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string,
schema: await GenericHelpers.getConfigValue('database.postgresdb.schema') as string,
migrations: [InitialMigration1587669153312],
migrationsRun: true
};
break;

case 'mariadb':
case 'mysqldb':
dbNotExistError = 'does not exist';
entities = MySQLDb;
connectionOptions = {
type: dbType === 'mysqldb' ? 'mysql' : 'mariadb',
@@ -75,16 +86,19 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string,
port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number,
username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string,
migrations: [InitialMigration1588157391238],
migrationsRun: true
};
break;

case 'sqlite':
dbNotExistError = 'no such table:';
entities = SQLite;
connectionOptions = {
type: 'sqlite',
database: path.join(n8nFolder, 'database.sqlite'),
database: path.join(n8nFolder, 'database.sqlite'),
entityPrefix: await GenericHelpers.getConfigValue('database.tablePrefix') as string,
migrations: [InitialMigration1588102412422],
migrationsRun: true,
};
break;
@@ -94,38 +108,19 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>

Object.assign(connectionOptions, {
entities: Object.values(entities),
synchronize: synchronize === true || process.env['NODE_ENV'] !== 'production',
logging: false
synchronize: false,
logging: false,
});

const connection = await createConnection(connectionOptions);

// TODO: Fix that properly
// @ts-ignore
await connection.runMigrations({
transaction: 'none',
});

collections.Credentials = getRepository(entities.CredentialsEntity);
// @ts-ignore
collections.Execution = getRepository(entities.ExecutionEntity);
// @ts-ignore
collections.Workflow = getRepository(entities.WorkflowEntity);

// Make sure that database did already get initialized
try {
// Try a simple query, if it fails it is normally a sign that
// database did not get initialized
await collections.Workflow!.findOne({ id: 1 });
} catch (error) {
// If query errors and the problem is that the database does not exist
// run the init again with "synchronize: true"
if (dbNotExistError !== undefined && error.message.includes(dbNotExistError)) {
// Disconnect before we try to connect again
if (connection.isConnected) {
await connection.close();
}

return init(true);
}
throw error;
}

return collections;
}

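For orientation, a rough usage sketch of the reworked init() above; the import path and the start() wrapper are assumptions made for illustration, not part of this commit. It shows the intended flow: connect once for the configured database type, let the migrations run (migrationsRun plus the explicit runMigrations call), then query the exposed repositories.

import * as Db from './src/Db';

async function start(): Promise<void> {
	// Connects and runs any pending migrations for the configured database type
	const collections = await Db.init();

	// Afterwards the TypeORM repositories are ready to be queried
	const workflow = await collections.Workflow!.findOne({ id: 1 });
	console.log(workflow ? workflow.name : 'No workflow with id 1 found');
}

start().catch(console.error);
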
@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class InitialMigration1587563438936 implements MigrationInterface {

	async up(queryRunner: QueryRunner): Promise<void> {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
	}

}

1 packages/cli/src/databases/mongodb/Migrations/index.ts Normal file
@@ -0,0 +1 @@
export * from './1587563438936-InitialMigration';

@@ -0,0 +1,20 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class InitialMigration1588157391238 implements MigrationInterface {
	name = 'InitialMigration1588157391238';

	async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query('CREATE TABLE IF NOT EXISTS `credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
		await queryRunner.query('CREATE TABLE IF NOT EXISTS `execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
		await queryRunner.query('CREATE TABLE IF NOT EXISTS`workflow_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `active` tinyint NOT NULL, `nodes` json NOT NULL, `connections` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, `settings` json NULL, `staticData` json NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query('DROP TABLE `workflow_entity`', undefined);
		await queryRunner.query('DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `execution_entity`', undefined);
		await queryRunner.query('DROP TABLE `execution_entity`', undefined);
		await queryRunner.query('DROP INDEX `IDX_07fde106c0b471d8cc80a64fc8` ON `credentials_entity`', undefined);
		await queryRunner.query('DROP TABLE `credentials_entity`', undefined);
	}

}

1 packages/cli/src/databases/mysqldb/migrations/index.ts Normal file
@@ -0,0 +1 @@
export * from './1588157391238-InitialMigration';

@@ -41,4 +41,5 @@ export class CredentialsEntity implements ICredentialsDb {

@Column('timestamp')
updatedAt: Date;

}

@@ -0,0 +1,22 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class InitialMigration1587669153312 implements MigrationInterface {
	name = 'InitialMigration1587669153312';

	async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_07fde106c0b471d8cc80a64fc8 ON credentials_entity (type) `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_c4d999a5e90784e8caccf5589d ON execution_entity ("workflowId") `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS workflow_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "active" boolean NOT NULL, "nodes" json NOT NULL, "connections" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, "settings" json, "staticData" json, CONSTRAINT PK_eded7d72664448da7745d551207 PRIMARY KEY ("id"))`, undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`DROP TABLE workflow_entity`, undefined);
		await queryRunner.query(`DROP INDEX IDX_c4d999a5e90784e8caccf5589d`, undefined);
		await queryRunner.query(`DROP TABLE execution_entity`, undefined);
		await queryRunner.query(`DROP INDEX IDX_07fde106c0b471d8cc80a64fc8`, undefined);
		await queryRunner.query(`DROP TABLE credentials_entity`, undefined);
	}

}

@@ -0,0 +1 @@
export * from './1587669153312-InitialMigration';

@@ -1,3 +1,4 @@
export * from './CredentialsEntity';
export * from './ExecutionEntity';
export * from './WorkflowEntity';

@@ -0,0 +1,22 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class InitialMigration1588102412422 implements MigrationInterface {
	name = 'InitialMigration1588102412422';

	async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS "credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_07fde106c0b471d8cc80a64fc8" ON "credentials_entity" ("type") `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS "execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime NOT NULL, "workflowData" text NOT NULL, "workflowId" varchar)`, undefined);
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_c4d999a5e90784e8caccf5589d" ON "execution_entity" ("workflowId") `, undefined);
		await queryRunner.query(`CREATE TABLE IF NOT EXISTS "workflow_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text NOT NULL, "connections" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL, "settings" text, "staticData" text)`, undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`DROP TABLE "workflow_entity"`, undefined);
		await queryRunner.query(`DROP INDEX "IDX_c4d999a5e90784e8caccf5589d"`, undefined);
		await queryRunner.query(`DROP TABLE "execution_entity"`, undefined);
		await queryRunner.query(`DROP INDEX "IDX_07fde106c0b471d8cc80a64fc8"`, undefined);
		await queryRunner.query(`DROP TABLE "credentials_entity"`, undefined);
	}

}

1 packages/cli/src/databases/sqlite/migrations/index.ts Normal file
@@ -0,0 +1 @@
export * from './1588102412422-InitialMigration';

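Each database flavor follows the same pattern: a timestamped class implementing MigrationInterface, re-exported from the folder's index.ts so that Db.ts can list it in the connection's migrations array. A hypothetical follow-up migration — the class name, timestamp and SQL are invented here purely to illustrate the pattern — would look like this:

import { MigrationInterface, QueryRunner } from 'typeorm';

// Illustrative example only, not part of this commit
export class AddWorkflowNameIndex1588999999999 implements MigrationInterface {
	name = 'AddWorkflowNameIndex1588999999999';

	async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_workflow_name" ON "workflow_entity" ("name")`, undefined);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`DROP INDEX "IDX_workflow_name"`, undefined);
	}
}

It would then be added to the index.ts export list and to the migrations array of the matching connection in Db.ts, so that migrationsRun picks it up on the next start.
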
@@ -36,7 +36,7 @@
"jest": "^24.9.0",
"source-map-support": "^0.5.9",
"ts-jest": "^24.0.2",
"tslint": "^5.17.0",
"tslint": "6.1.2",
"typescript": "~3.7.4"
},
"dependencies": {

@@ -32,7 +32,7 @@
"@types/dateformat": "^3.0.0",
"@types/express": "^4.17.6",
"@types/file-saver": "^2.0.1",
"@types/jest": "^24.0.18",
"@types/jest": "^25.2.1",
"@types/lodash.get": "^4.4.6",
"@types/lodash.set": "^4.3.6",
"@types/node": "12.12.22",
@@ -71,8 +71,8 @@
"quill-autoformat": "^0.1.1",
"sass-loader": "^8.0.0",
"string-template-parser": "^1.2.6",
"ts-jest": "^24.0.2",
"tslint": "^5.17.0",
"ts-jest": "^25.4.0",
"tslint": "^6.1.2",
"typescript": "~3.7.4",
"vue": "^2.6.9",
"vue-cli-plugin-webpack-bundle-analyzer": "^2.0.0",

@@ -140,7 +140,7 @@ export class New extends Command {
// in the correct way
const replaceValues = {
ClassNameReplace: changeCase.pascalCase(nodeName),
DisplayNameReplace: changeCase.titleCase(nodeName),
DisplayNameReplace: changeCase.capitalCase(nodeName),
N8nNameReplace: changeCase.camelCase(nodeName),
NodeDescriptionReplace: additionalAnswers.description,
};

@@ -55,12 +55,12 @@
"@oclif/errors": "^1.2.2",
"@types/express": "^4.16.1",
"@types/node": "^10.10.1",
"change-case": "^3.1.0",
"change-case": "^4.1.1",
"copyfiles": "^2.1.1",
"inquirer": "^7.0.0",
"n8n-core": "^0.21.0",
"n8n-workflow": "^0.20.0",
"replace-in-file": "^4.1.0",
"replace-in-file": "^6.0.0",
"request": "^2.88.2",
"tmp-promise": "^2.0.2",
"typescript": "~3.7.4"

@@ -1,6 +1,6 @@

import * as fs from 'fs';
import replaceInFile, { ReplaceInFileConfig } from 'replace-in-file';
import {replaceInFile, ReplaceInFileConfig } from 'replace-in-file';

const { promisify } = require('util');
const fsCopyFile = promisify(fs.copyFile);

@@ -76,7 +76,7 @@ export class Bannerbear implements INodeType {

methods = {
loadOptions: {
// Get all the available escalation policies to display them to user so that he can
// Get all the available templates to display them to user so that he can
// select them easily
async getTemplates(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
const returnData: INodePropertyOptions[] = [];
@@ -91,6 +91,23 @@ export class Bannerbear implements INodeType {
}
return returnData;
},

// Get all the available modifications to display them to user so that he can
// select them easily
async getModificationNames(this: ILoadOptionsFunctions): Promise<INodePropertyOptions[]> {
const templateId = this.getCurrentNodeParameter('templateId');
const returnData: INodePropertyOptions[] = [];
const { available_modifications } = await bannerbearApiRequest.call(this, 'GET', `/templates/${templateId}`);
for (const modification of available_modifications) {
const modificationName = modification.name;
const modificationId = modification.name;
returnData.push({
name: modificationName,
value: modificationId,
});
}
return returnData;
},
},
};

@@ -130,6 +147,29 @@ export class Bannerbear implements INodeType {
}
}
responseData = await bannerbearApiRequest.call(this, 'POST', '/images', body);
if (additionalFields.waitForImage && responseData.status !== 'completed') {
let maxTries = (additionalFields.waitForImageMaxTries as number) || 3;

const promise = (uid: string) => {
let data: IDataObject = {};
return new Promise((resolve, reject) => {
const timeout = setInterval(async () => {
data = await bannerbearApiRequest.call(this, 'GET', `/images/${uid}`);

if (data.status === 'completed') {
clearInterval(timeout);
resolve(data);
}
if (--maxTries === 0) {
clearInterval(timeout);
reject(new Error('Image did not finish processing after multiple tries.'));
}
}, 2000);
});
};

responseData = await promise(responseData.uid);
}
}
//https://developers.bannerbear.com/#get-a-specific-image
if (operation === 'get') {

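The "wait for image" branch above polls the GET endpoint every two seconds until the status is 'completed' or the retry budget is used up. The same idea, distilled into a small standalone sketch (the helper name and the fetchStatus callback are invented for illustration; the node keeps its own inline version):

// Generic polling helper: resolve once the resource reports 'completed',
// reject after maxTries attempts.
function pollUntilCompleted<T extends { status?: string }>(
	fetchStatus: () => Promise<T>,
	maxTries = 3,
	intervalMs = 2000,
): Promise<T> {
	return new Promise((resolve, reject) => {
		const timer = setInterval(async () => {
			const data = await fetchStatus();
			if (data.status === 'completed') {
				clearInterval(timer);
				resolve(data);
			} else if (--maxTries === 0) {
				clearInterval(timer);
				reject(new Error('Resource did not finish processing after multiple tries.'));
			}
		}, intervalMs);
	});
}

Note that, as in the node's inline version, an error thrown inside the async interval callback surfaces as an unhandled rejection rather than rejecting the returned promise, so fetchStatus is expected not to throw.
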
@@ -81,6 +81,33 @@ export const imageFields = [
default: '',
description: 'Metadata that you need to store e.g. ID of a record in your DB',
},
{
displayName: 'Wait for Image',
name: 'waitForImage',
type: 'boolean',
default: false,
description: `Wait for the image to be proccesed before returning.<br />
If after three tries the images is not ready, an error will be thrown.<br />
Number of tries can be increased by setting "Wait Max Tries".`,
},
{
displayName: 'Wait Max Tries',
name: 'waitForImageMaxTries',
type: 'number',
typeOptions: {
minValue: 1,
maxValue: 10,
},
displayOptions: {
show: {
waitForImage: [
true,
],
},
},
default: 3,
description: `How often it should check if the image is available before it fails.`,
},
{
displayName: 'Webhook URL',
name: 'webhookUrl',
@@ -117,7 +144,13 @@ export const imageFields = [
{
displayName: 'Name',
name: 'name',
type: 'string',
type: 'options',
typeOptions: {
loadOptionsMethod: 'getModificationNames',
loadOptionsDependsOn: [
'templateId',
],
},
default: '',
description: 'The name of the item you want to change',
},

@@ -29,14 +29,13 @@ export async function rocketchatApiRequest(this: IHookFunctions | IExecuteFuncti
try {
return await this.helpers.request!(options);
} catch (error) {
console.error(error);
let errorMessage = error.message;

const errorMessage = error.response.body.message || error.response.body.Message;

if (errorMessage !== undefined) {
throw errorMessage;
if (error.response.body.error) {
errorMessage = error.response.body.error;
}
throw error.response.body;

throw new Error(`Rocket.chat error response [${error.statusCode}]: ${errorMessage}`);
}
}

@@ -397,12 +397,12 @@ export class Rocketchat implements INodeType {

async executeSingle(this: IExecuteSingleFunctions): Promise<INodeExecutionData> {
const resource = this.getNodeParameter('resource') as string;
const opeation = this.getNodeParameter('operation') as string;
const operation = this.getNodeParameter('operation') as string;
let response;

if (resource === 'chat') {
//https://rocket.chat/docs/developer-guides/rest-api/chat/postmessage
if (opeation === 'postMessage') {
if (operation === 'postMessage') {
const channel = this.getNodeParameter('channel') as string;
const text = this.getNodeParameter('text') as string;
const options = this.getNodeParameter('options') as IDataObject;
@@ -489,11 +489,7 @@ export class Rocketchat implements INodeType {
body.attachments = validateJSON(this.getNodeParameter('attachmentsJson') as string);
}

try {
response = await rocketchatApiRequest.call(this, '/chat', 'POST', 'postMessage', body);
} catch (err) {
throw new Error(`Rocketchat Error: ${err}`);
}
response = await rocketchatApiRequest.call(this, '/chat', 'POST', 'postMessage', body);
}
}

10 packages/nodes-base/nodes/Slack/GenericFunctions.ts Normal file
@@ -0,0 +1,10 @@

export function validateJSON(json: string | undefined): any { // tslint:disable-line:no-any
	let result;
	try {
		result = JSON.parse(json!);
	} catch (exception) {
		result = undefined;
	}
	return result;
}

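validateJSON is a small guard for user-supplied JSON strings: it returns the parsed value, or undefined instead of throwing when the input is missing or malformed. A quick usage sketch (the inputs are invented for illustration):

// Valid JSON string: returns the parsed value
const attachments = validateJSON('[{"color": "#36a64f", "text": "Done"}]'); // array with one object

// Broken or missing input: returns undefined instead of throwing
const broken = validateJSON('{"oops": ');  // undefined
const missing = validateJSON(undefined);   // undefined
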
File diff suppressed because it is too large
@@ -269,7 +269,7 @@
"@types/moment-timezone": "^0.5.12",
"@types/mongodb": "^3.5.4",
"@types/node": "^10.10.1",
"@types/nodemailer": "^4.6.5",
"@types/nodemailer": "^6.4.0",
"@types/redis": "^2.8.11",
"@types/request-promise-native": "~1.0.15",
"@types/uuid": "^3.4.6",
@@ -291,7 +291,7 @@
"formidable": "^1.2.1",
"glob-promise": "^3.4.0",
"gm": "^1.23.1",
"googleapis": "~46.0.0",
"googleapis": "~50.0.0",
"imap-simple": "^4.3.0",
"lodash.get": "^4.4.2",
"lodash.set": "^4.3.2",
@@ -301,7 +301,7 @@
"mongodb": "^3.5.5",
"mysql2": "^2.0.1",
"n8n-core": "~0.33.0",
"nodemailer": "^5.1.1",
"nodemailer": "^6.4.6",
"pdf-parse": "^1.1.1",
"pg-promise": "^9.0.3",
"redis": "^2.8.0",

@@ -31,7 +31,7 @@
"@types/node": "^10.10.1",
"jest": "^24.9.0",
"ts-jest": "^24.0.2",
"tslint": "^5.17.0",
"tslint": "^6.1.2",
"typescript": "~3.7.4"
},
"dependencies": {

@@ -277,6 +277,7 @@ export function displayParameter(nodeValues: INodeParameters, parameter: INodePr
nodeValuesRoot = nodeValuesRoot || nodeValues;

let value;
const values: any[] = []; // tslint:disable-line:no-any
if (parameter.displayOptions.show) {
// All the defined rules have to match to display parameter
for (const propertyName of Object.keys(parameter.displayOptions.show)) {
@@ -288,7 +289,14 @@ export function displayParameter(nodeValues: INodeParameters, parameter: INodePr
value = get(nodeValues, propertyName);
}

if (value === undefined || !parameter.displayOptions.show[propertyName].includes(value as string)) {
values.length = 0;
if (!Array.isArray(value)) {
values.push(value);
} else {
values.push.apply(values, value);
}

if (values.length === 0 || !parameter.displayOptions.show[propertyName].some(v => values.includes(v))) {
return false;
}
}
@@ -304,7 +312,15 @@ export function displayParameter(nodeValues: INodeParameters, parameter: INodePr
// Get the value from current level
value = get(nodeValues, propertyName);
}
if (value !== undefined && parameter.displayOptions.hide[propertyName].includes(value as string)) {

values.length = 0;
if (!Array.isArray(value)) {
values.push(value);
} else {
values.push.apply(values, value);
}

if (values.length !== 0 && parameter.displayOptions.hide[propertyName].some(v => values.includes(v))) {
return false;
}
}

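The effect of this change is that a parameter value may now be an array (for example a multi-select option), and a show/hide rule matches as soon as any element overlaps with the configured values. Distilled into a tiny standalone sketch (the function and variable names are invented for illustration):

// Array-aware matching as introduced above: flatten the current value into a
// list, then check for any overlap with the values configured under "show".
function matchesShowRule(value: unknown, showValues: unknown[]): boolean {
	const values: unknown[] = [];
	if (!Array.isArray(value)) {
		values.push(value);
	} else {
		values.push(...value);
	}
	return values.length !== 0 && showValues.some(v => values.includes(v));
}

matchesShowRule(['a', 'b'], ['b']);  // true: one selected option overlaps
matchesShowRule('a', ['b']);         // false: no overlap
matchesShowRule(undefined, ['b']);   // false: undefined never matches 'b'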