feat(core, editor): Support pairedItem for pinned data (#3843)

* 📘 Adjust interface

*  Adjust pindata in state store

*  Add utils

*  Replace utils calls

*  Adjust pindata intake and display

* 🔥 Remove excess BE fixes

* 📝 Update comment

* 🧪 Adjust tests

* 🔥 Remove unneeded helper

* 🚚 Improve naming

* 🧹 Clean up `ormconfig.ts`

* 📘 Add types and type guards

*  Improve serializer for sqlite

*  Create migration utils

*  Set up sqlite serializer

* 🗃️ Write sqlite migration

* 🗃️ Write MySQL migration

* 🗃️ Write Postgres migration

*  Add imports and exports to barrels

* 🚚 Rename `runChunked` to `runInBatches`

*  Improve migration loggers

* ♻️ Address feedback

* 🚚 Improve naming
This commit is contained in:
Iván Ovejero 2022-08-22 17:46:22 +02:00 committed by GitHub
parent 6bd7a09a45
commit b1e715299d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
24 changed files with 399 additions and 143 deletions

View file

@ -22,7 +22,7 @@ import * as config from '../../../config';
import { DatabaseType, IWorkflowDb } from '../..'; import { DatabaseType, IWorkflowDb } from '../..';
import { TagEntity } from './TagEntity'; import { TagEntity } from './TagEntity';
import { SharedWorkflow } from './SharedWorkflow'; import { SharedWorkflow } from './SharedWorkflow';
import { objectRetriever, serializer } from '../utils/transformers'; import { objectRetriever, sqlite } from '../utils/transformers';
function resolveDataType(dataType: string) { function resolveDataType(dataType: string) {
const dbType = config.getEnv('database.type'); const dbType = config.getEnv('database.type');
@ -120,7 +120,7 @@ export class WorkflowEntity implements IWorkflowDb {
@Column({ @Column({
type: config.getEnv('database.type') === 'sqlite' ? 'text' : 'json', type: config.getEnv('database.type') === 'sqlite' ? 'text' : 'json',
nullable: true, nullable: true,
transformer: serializer, transformer: sqlite.jsonColumn,
}) })
pinData: IPinData; pinData: IPinData;

View file

@ -1,6 +1,6 @@
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
// replacing the credentials in workflows and execution // replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }` // `nodeType: name` changes to `nodeType: { id, name }`
@ -22,7 +22,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -65,7 +65,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
WHERE waitTill IS NOT NULL AND finished = 0 WHERE waitTill IS NOT NULL AND finished = 0
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = execution.workflowData; const data = execution.workflowData;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -158,7 +158,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
FROM ${tablePrefix}workflow_entity FROM ${tablePrefix}workflow_entity
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -206,7 +206,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
WHERE waitTill IS NOT NULL AND finished = 0 WHERE waitTill IS NOT NULL AND finished = 0
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = execution.workflowData; const data = execution.workflowData;
let credentialsUpdated = false; let credentialsUpdated = false;

View file

@ -1,6 +1,6 @@
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
// add node ids in workflow objects // add node ids in workflow objects
@ -17,7 +17,7 @@ export class AddNodeIds1658932910559 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
let nodes = workflow.nodes; let nodes = workflow.nodes;
if (typeof nodes === 'string') { if (typeof nodes === 'string') {
@ -31,8 +31,7 @@ export class AddNodeIds1658932910559 implements MigrationInterface {
} }
}); });
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE ${tablePrefix}workflow_entity UPDATE ${tablePrefix}workflow_entity
SET nodes = :nodes SET nodes = :nodes
@ -56,14 +55,13 @@ export class AddNodeIds1658932910559 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
// @ts-ignore // @ts-ignore
nodes.forEach((node) => delete node.id ); nodes.forEach((node) => delete node.id);
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE ${tablePrefix}workflow_entity UPDATE ${tablePrefix}workflow_entity
SET nodes = :nodes SET nodes = :nodes

View file

@ -0,0 +1,46 @@
import {
logMigrationStart,
logMigrationEnd,
runInBatches,
getTablePrefix,
} from '../../utils/migrationHelpers';
import { addJsonKeyToPinDataColumn } from '../sqlite/1659888469333-AddJsonKeyPinData';
import type { MigrationInterface, QueryRunner } from 'typeorm';
/**
* Convert JSON-type `pinData` column in `workflow_entity` table from
* `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
*/
/**
 * Convert the MySQL JSON-type `pinData` column in `workflow_entity` from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`,
 * i.e. nest each pinned item under a `json` key.
 */
export class AddJsonKeyPinData1659895550980 implements MigrationInterface {
	name = 'AddJsonKeyPinData1659895550980';

	async up(queryRunner: QueryRunner) {
		logMigrationStart(this.name);

		const tableName = `${getTablePrefix()}workflow_entity`;

		// only rows that actually carry pinned data need rewriting
		const selectQuery = `
			SELECT id, pinData
			FROM \`${tableName}\`
			WHERE pinData IS NOT NULL;
		`;

		const updateStatement = `
			UPDATE \`${tableName}\`
			SET \`pinData\` = :pinData
			WHERE id = :id;
		`;

		await runInBatches(
			queryRunner,
			selectQuery,
			addJsonKeyToPinDataColumn(queryRunner, updateStatement),
		);

		logMigrationEnd(this.name);
	}

	async down() {
		// irreversible migration
	}
}

View file

@ -18,6 +18,7 @@ import { CommunityNodes1652254514003 } from './1652254514003-CommunityNodes';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn'; import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';
import { IntroducePinData1654090101303 } from './1654090101303-IntroducePinData'; import { IntroducePinData1654090101303 } from './1654090101303-IntroducePinData';
import { AddNodeIds1658932910559 } from './1658932910559-AddNodeIds'; import { AddNodeIds1658932910559 } from './1658932910559-AddNodeIds';
import { AddJsonKeyPinData1659895550980 } from './1659895550980-AddJsonKeyPinData';
export const mysqlMigrations = [ export const mysqlMigrations = [
InitialMigration1588157391238, InitialMigration1588157391238,
@ -40,4 +41,5 @@ export const mysqlMigrations = [
AddAPIKeyColumn1652905585850, AddAPIKeyColumn1652905585850,
IntroducePinData1654090101303, IntroducePinData1654090101303,
AddNodeIds1658932910559, AddNodeIds1658932910559,
AddJsonKeyPinData1659895550980,
]; ];

View file

@ -1,6 +1,6 @@
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
// replacing the credentials in workflows and execution // replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }` // `nodeType: name` changes to `nodeType: { id, name }`
@ -17,7 +17,6 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
await queryRunner.query(`SET search_path TO ${schema};`); await queryRunner.query(`SET search_path TO ${schema};`);
const credentialsEntities = await queryRunner.query(` const credentialsEntities = await queryRunner.query(`
SELECT id, name, type SELECT id, name, type
FROM ${tablePrefix}credentials_entity FROM ${tablePrefix}credentials_entity
@ -29,7 +28,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -72,7 +71,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
WHERE "waitTill" IS NOT NULL AND finished = FALSE WHERE "waitTill" IS NOT NULL AND finished = FALSE
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = execution.workflowData; const data = execution.workflowData;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -172,7 +171,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
FROM ${tablePrefix}workflow_entity FROM ${tablePrefix}workflow_entity
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
let credentialsUpdated = false; let credentialsUpdated = false;
@ -221,7 +220,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
WHERE "waitTill" IS NOT NULL AND finished = FALSE WHERE "waitTill" IS NOT NULL AND finished = FALSE
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = execution.workflowData; const data = execution.workflowData;
let credentialsUpdated = false; let credentialsUpdated = false;

View file

@ -1,6 +1,6 @@
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
// add node ids in workflow objects // add node ids in workflow objects
@ -23,7 +23,7 @@ export class AddNodeIds1658932090381 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
// @ts-ignore // @ts-ignore
@ -33,8 +33,7 @@ export class AddNodeIds1658932090381 implements MigrationInterface {
} }
}); });
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE ${tablePrefix}workflow_entity UPDATE ${tablePrefix}workflow_entity
SET nodes = :nodes SET nodes = :nodes
@ -64,14 +63,13 @@ export class AddNodeIds1658932090381 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = workflow.nodes; const nodes = workflow.nodes;
// @ts-ignore // @ts-ignore
nodes.forEach((node) => delete node.id ); nodes.forEach((node) => delete node.id);
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE ${tablePrefix}workflow_entity UPDATE ${tablePrefix}workflow_entity
SET nodes = :nodes SET nodes = :nodes

View file

@ -0,0 +1,46 @@
import {
getTablePrefix,
logMigrationEnd,
logMigrationStart,
runInBatches,
} from '../../utils/migrationHelpers';
import { addJsonKeyToPinDataColumn } from '../sqlite/1659888469333-AddJsonKeyPinData';
import type { MigrationInterface, QueryRunner } from 'typeorm';
/**
* Convert JSON-type `pinData` column in `workflow_entity` table from
* `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
*/
/**
 * Convert the Postgres JSON-type `pinData` column in `workflow_entity` from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`,
 * i.e. nest each pinned item under a `json` key.
 */
export class AddJsonKeyPinData1659902242948 implements MigrationInterface {
	name = 'AddJsonKeyPinData1659902242948';

	async up(queryRunner: QueryRunner) {
		logMigrationStart(this.name);

		const tableName = `${getTablePrefix()}workflow_entity`;

		// only rows that actually carry pinned data need rewriting
		const selectQuery = `
			SELECT id, "pinData"
			FROM ${tableName}
			WHERE "pinData" IS NOT NULL;
		`;

		const updateStatement = `
			UPDATE ${tableName}
			SET "pinData" = :pinData
			WHERE id = :id;
		`;

		await runInBatches(
			queryRunner,
			selectQuery,
			addJsonKeyToPinDataColumn(queryRunner, updateStatement),
		);

		logMigrationEnd(this.name);
	}

	async down() {
		// irreversible migration
	}
}

View file

@ -16,6 +16,7 @@ import { CommunityNodes1652254514002 } from './1652254514002-CommunityNodes';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn'; import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';
import { IntroducePinData1654090467022 } from './1654090467022-IntroducePinData'; import { IntroducePinData1654090467022 } from './1654090467022-IntroducePinData';
import { AddNodeIds1658932090381 } from './1658932090381-AddNodeIds'; import { AddNodeIds1658932090381 } from './1658932090381-AddNodeIds';
import { AddJsonKeyPinData1659902242948 } from './1659902242948-AddJsonKeyPinData';
export const postgresMigrations = [ export const postgresMigrations = [
InitialMigration1587669153312, InitialMigration1587669153312,
@ -36,4 +37,5 @@ export const postgresMigrations = [
AddAPIKeyColumn1652905585850, AddAPIKeyColumn1652905585850,
IntroducePinData1654090467022, IntroducePinData1654090467022,
AddNodeIds1658932090381, AddNodeIds1658932090381,
AddJsonKeyPinData1659902242948,
]; ];

View file

@ -1,7 +1,7 @@
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers'; import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
// replacing the credentials in workflows and execution // replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }` // `nodeType: name` changes to `nodeType: { id, name }`
@ -25,7 +25,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = JSON.parse(workflow.nodes); const nodes = JSON.parse(workflow.nodes);
let credentialsUpdated = false; let credentialsUpdated = false;
@ -68,7 +68,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
WHERE "waitTill" IS NOT NULL AND finished = 0 WHERE "waitTill" IS NOT NULL AND finished = 0
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = JSON.parse(execution.workflowData); const data = JSON.parse(execution.workflowData);
let credentialsUpdated = false; let credentialsUpdated = false;
@ -164,7 +164,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
// @ts-ignore // @ts-ignore
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = JSON.parse(workflow.nodes); const nodes = JSON.parse(workflow.nodes);
@ -214,7 +214,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => { await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
// @ts-ignore // @ts-ignore
waitingExecutions.forEach(async (execution) => { waitingExecutions.forEach(async (execution) => {
const data = JSON.parse(execution.workflowData); const data = JSON.parse(execution.workflowData);

View file

@ -2,7 +2,7 @@ import { INode } from 'n8n-workflow';
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config'; import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers'; import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';
import { runChunked } from '../../utils/migrationHelpers'; import { runInBatches } from '../../utils/migrationHelpers';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
// add node ids in workflow objects // add node ids in workflow objects
@ -21,7 +21,7 @@ export class AddNodeIds1658930531669 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = JSON.parse(workflow.nodes); const nodes = JSON.parse(workflow.nodes);
nodes.forEach((node: INode) => { nodes.forEach((node: INode) => {
@ -30,8 +30,7 @@ export class AddNodeIds1658930531669 implements MigrationInterface {
} }
}); });
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE "${tablePrefix}workflow_entity" UPDATE "${tablePrefix}workflow_entity"
SET nodes = :nodes SET nodes = :nodes
@ -48,7 +47,6 @@ export class AddNodeIds1658930531669 implements MigrationInterface {
logMigrationEnd(this.name); logMigrationEnd(this.name);
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.getEnv('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
@ -58,14 +56,13 @@ export class AddNodeIds1658930531669 implements MigrationInterface {
`; `;
// @ts-ignore // @ts-ignore
await runChunked(queryRunner, workflowsQuery, (workflows) => { await runInBatches(queryRunner, workflowsQuery, (workflows) => {
workflows.forEach(async (workflow) => { workflows.forEach(async (workflow) => {
const nodes = JSON.parse(workflow.nodes); const nodes = JSON.parse(workflow.nodes);
// @ts-ignore // @ts-ignore
nodes.forEach((node) => delete node.id ); nodes.forEach((node) => delete node.id);
const [updateQuery, updateParams] = const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
queryRunner.connection.driver.escapeQueryWithParameters(
` `
UPDATE "${tablePrefix}workflow_entity" UPDATE "${tablePrefix}workflow_entity"
SET nodes = :nodes SET nodes = :nodes

View file

@ -0,0 +1,93 @@
import {
logMigrationStart,
logMigrationEnd,
runInBatches,
getTablePrefix,
escapeQuery,
} from '../../utils/migrationHelpers';
import type { MigrationInterface, QueryRunner } from 'typeorm';
import { isJsonKeyObject, PinData } from '../../utils/migrations.types';
/**
* Convert TEXT-type `pinData` column in `workflow_entity` table from
* `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
*/
/**
 * Convert the sqlite TEXT-type `pinData` column in `workflow_entity` from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`,
 * i.e. nest each pinned item under a `json` key.
 */
export class AddJsonKeyPinData1659888469333 implements MigrationInterface {
	name = 'AddJsonKeyPinData1659888469333';

	async up(queryRunner: QueryRunner) {
		logMigrationStart(this.name);

		const tableName = `${getTablePrefix()}workflow_entity`;

		// only rows that actually carry pinned data need rewriting
		const selectQuery = `
			SELECT id, pinData
			FROM "${tableName}"
			WHERE pinData IS NOT NULL;
		`;

		const updateStatement = `
			UPDATE "${tableName}"
			SET "pinData" = :pinData
			WHERE id = :id;
		`;

		await runInBatches(
			queryRunner,
			selectQuery,
			addJsonKeyToPinDataColumn(queryRunner, updateStatement),
		);

		logMigrationEnd(this.name);
	}

	async down() {
		// irreversible migration
	}
}
/**
 * Create the batch callback for `runInBatches`: convert each fetched
 * workflow's `pinData` to the `{ json: ... }` item shape and persist it.
 *
 * Every UPDATE is awaited — the previous `forEach` fired the queries without
 * awaiting them, so failures surfaced as unhandled rejections and the
 * migration could report completion while writes were still in flight.
 */
export const addJsonKeyToPinDataColumn =
	(queryRunner: QueryRunner, updateStatement: string) =>
	async (fetchedWorkflows: PinData.FetchedWorkflow[]) => {
		for (const { id, pinData } of makeUpdateParams(fetchedWorkflows)) {
			const [escapedStatement, escapedParams] = escapeQuery(queryRunner, updateStatement, {
				pinData,
				id,
			});

			await queryRunner.query(escapedStatement, escapedParams);
		}
	};
/**
 * Build `{ id, pinData }` update params for every workflow whose pinned data
 * still contains legacy-shape items (plain `IDataObject`s not nested under a
 * `json` key). Workflows already fully in the new shape produce no update.
 *
 * Fixes over the previous version:
 * - Detection uses `isJsonKeyObject` (the same predicate as the conversion)
 *   instead of truthiness of `item.json`, which misclassified items such as
 *   `{ json: 0 }` and threw on non-object items.
 * - Already-converted nodes are kept in the rewritten column; previously they
 *   were dropped whenever a workflow mixed converted and legacy nodes, losing
 *   their pinned data on write.
 */
function makeUpdateParams(fetchedWorkflows: PinData.FetchedWorkflow[]) {
	const updateParams: PinData.FetchedWorkflow[] = [];

	for (const { id, pinData: rawPinData } of fetchedWorkflows) {
		// sqlite stores the column as TEXT; MySQL/Postgres return parsed JSON
		const pinDataPerWorkflow: PinData.Old | PinData.New =
			typeof rawPinData === 'string' ? JSON.parse(rawPinData) : rawPinData;

		let workflowNeedsUpdate = false;

		const newPinDataPerWorkflow = Object.keys(pinDataPerWorkflow).reduce<PinData.New>(
			(newPinDataPerWorkflow, nodeName) => {
				const pinDataPerNode = pinDataPerWorkflow[nodeName];

				if (!pinDataPerNode.every(isJsonKeyObject)) workflowNeedsUpdate = true;

				// keep new-shape items untouched, wrap legacy items under `json`
				newPinDataPerWorkflow[nodeName] = pinDataPerNode.map((item) =>
					isJsonKeyObject(item) ? item : { json: item },
				);

				return newPinDataPerWorkflow;
			},
			{},
		);

		if (workflowNeedsUpdate) {
			updateParams.push({ id, pinData: JSON.stringify(newPinDataPerWorkflow) });
		}
	}

	return updateParams;
}

View file

@ -11,10 +11,11 @@ import { AddExecutionEntityIndexes1644421939510 } from './1644421939510-AddExecu
import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement'; import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement';
import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail'; import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail';
import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings'; import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings';
import { CommunityNodes1652254514001 } from './1652254514001-CommunityNodes' import { CommunityNodes1652254514001 } from './1652254514001-CommunityNodes';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn'; import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';
import { IntroducePinData1654089251344 } from './1654089251344-IntroducePinData'; import { IntroducePinData1654089251344 } from './1654089251344-IntroducePinData';
import { AddNodeIds1658930531669 } from './1658930531669-AddNodeIds'; import { AddNodeIds1658930531669 } from './1658930531669-AddNodeIds';
import { AddJsonKeyPinData1659888469333 } from './1659888469333-AddJsonKeyPinData';
const sqliteMigrations = [ const sqliteMigrations = [
InitialMigration1588102412422, InitialMigration1588102412422,
@ -34,6 +35,7 @@ const sqliteMigrations = [
AddAPIKeyColumn1652905585850, AddAPIKeyColumn1652905585850,
IntroducePinData1654089251344, IntroducePinData1654089251344,
AddNodeIds1658930531669, AddNodeIds1658930531669,
AddJsonKeyPinData1659888469333,
]; ];
export { sqliteMigrations }; export { sqliteMigrations };

View file

@ -2,6 +2,9 @@ import path from 'path';
import { UserSettings } from 'n8n-core'; import { UserSettings } from 'n8n-core';
import { entities } from './entities'; import { entities } from './entities';
const MIGRATIONS_DIR = path.resolve('src', 'databases', 'migrations');
const ENTITIES_DIR = path.resolve('src', 'databases', 'entities');
export default [ export default [
{ {
name: 'sqlite', name: 'sqlite',
@ -9,10 +12,10 @@ export default [
logging: true, logging: true,
entities: Object.values(entities), entities: Object.values(entities),
database: path.resolve(UserSettings.getUserN8nFolderPath(), 'database.sqlite'), database: path.resolve(UserSettings.getUserN8nFolderPath(), 'database.sqlite'),
migrations: [path.resolve('migrations', 'sqlite', 'index.ts')], migrations: [path.resolve(MIGRATIONS_DIR, 'sqlite', 'index.ts')],
cli: { cli: {
entitiesDir: path.resolve('entities'), entitiesDir: ENTITIES_DIR,
migrationsDir: path.resolve('migrations', 'sqlite'), migrationsDir: path.resolve(MIGRATIONS_DIR, 'sqlite'),
}, },
}, },
{ {
@ -26,10 +29,10 @@ export default [
port: 5432, port: 5432,
logging: false, logging: false,
entities: Object.values(entities), entities: Object.values(entities),
migrations: [path.resolve('migrations', 'postgresdb', 'index.ts')], migrations: [path.resolve(MIGRATIONS_DIR, 'postgresdb', 'index.ts')],
cli: { cli: {
entitiesDir: path.resolve('entities'), entitiesDir: ENTITIES_DIR,
migrationsDir: path.resolve('migrations', 'postgresdb'), migrationsDir: path.resolve(MIGRATIONS_DIR, 'postgresdb'),
}, },
}, },
{ {
@ -42,10 +45,10 @@ export default [
port: 3306, port: 3306,
logging: false, logging: false,
entities: Object.values(entities), entities: Object.values(entities),
migrations: [path.resolve('migrations', 'mysqldb', 'index.ts')], migrations: [path.resolve(MIGRATIONS_DIR, 'mysqldb', 'index.ts')],
cli: { cli: {
entitiesDir: path.resolve('entities'), entitiesDir: ENTITIES_DIR,
migrationsDir: path.resolve('migrations', 'mysqldb'), migrationsDir: path.resolve(MIGRATIONS_DIR, 'mysqldb'),
}, },
}, },
{ {
@ -58,10 +61,10 @@ export default [
port: 3306, port: 3306,
logging: false, logging: false,
entities: Object.values(entities), entities: Object.values(entities),
migrations: [path.resolve('migrations', 'mysqldb', 'index.ts')], migrations: [path.resolve(MIGRATIONS_DIR, 'mysqldb', 'index.ts')],
cli: { cli: {
entitiesDir: path.resolve('entities'), entitiesDir: ENTITIES_DIR,
migrationsDir: path.resolve('migrations', 'mysqldb'), migrationsDir: path.resolve(MIGRATIONS_DIR, 'mysqldb'),
}, },
}, },
]; ];

View file

@ -1,7 +1,8 @@
/* eslint-disable no-await-in-loop */ /* eslint-disable no-await-in-loop */
import { readFileSync, rmSync } from 'fs'; import { readFileSync, rmSync } from 'fs';
import { UserSettings } from 'n8n-core'; import { UserSettings } from 'n8n-core';
import { QueryRunner } from 'typeorm/query-runner/QueryRunner'; import type { QueryRunner } from 'typeorm/query-runner/QueryRunner';
import config from '../../../config';
import { getLogger } from '../../Logger'; import { getLogger } from '../../Logger';
const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json'; const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
@ -35,28 +36,36 @@ export function loadSurveyFromDisk(): string | null {
} }
let logFinishTimeout: NodeJS.Timeout; let logFinishTimeout: NodeJS.Timeout;
const disableLogging = process.argv[1].split('/').includes('jest');
export function logMigrationStart(migrationName: string): void { export function logMigrationStart(
migrationName: string,
disableLogging = process.env.NODE_ENV === 'test',
): void {
if (disableLogging) return; if (disableLogging) return;
const logger = getLogger();
if (!logFinishTimeout) { if (!logFinishTimeout) {
logger.warn('Migrations in progress, please do NOT stop the process.'); getLogger().warn('Migrations in progress, please do NOT stop the process.');
} }
logger.debug(`Starting migration ${migrationName}`);
getLogger().debug(`Starting migration ${migrationName}`);
clearTimeout(logFinishTimeout); clearTimeout(logFinishTimeout);
} }
export function logMigrationEnd(migrationName: string): void { export function logMigrationEnd(
migrationName: string,
disableLogging = process.env.NODE_ENV === 'test',
): void {
if (disableLogging) return; if (disableLogging) return;
const logger = getLogger();
logger.debug(`Finished migration ${migrationName}`); getLogger().debug(`Finished migration ${migrationName}`);
logFinishTimeout = setTimeout(() => { logFinishTimeout = setTimeout(() => {
logger.warn('Migrations finished.'); getLogger().warn('Migrations finished.');
}, 100); }, 100);
} }
export function chunkQuery(query: string, limit: number, offset = 0): string { export function batchQuery(query: string, limit: number, offset = 0): string {
return ` return `
${query} ${query}
LIMIT ${limit} LIMIT ${limit}
@ -64,7 +73,7 @@ export function chunkQuery(query: string, limit: number, offset = 0): string {
`; `;
} }
export async function runChunked( export async function runInBatches(
queryRunner: QueryRunner, queryRunner: QueryRunner,
query: string, query: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any // eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -72,14 +81,42 @@ export async function runChunked(
limit = 100, limit = 100,
): Promise<void> { ): Promise<void> {
let offset = 0; let offset = 0;
let chunkedQuery: string; let batchedQuery: string;
let chunkedQueryResults: unknown[]; let batchedQueryResults: unknown[];
// eslint-disable-next-line no-param-reassign
if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
do { do {
chunkedQuery = chunkQuery(query, limit, offset); batchedQuery = batchQuery(query, limit, offset);
chunkedQueryResults = (await queryRunner.query(chunkedQuery)) as unknown[]; batchedQueryResults = (await queryRunner.query(batchedQuery)) as unknown[];
// pass a copy to prevent errors from mutation // pass a copy to prevent errors from mutation
await operation([...chunkedQueryResults]); await operation([...batchedQueryResults]);
offset += limit; offset += limit;
} while (chunkedQueryResults.length === limit); } while (batchedQueryResults.length === limit);
} }
/**
 * Resolve the table-name prefix for the active database.
 *
 * For Postgres the configured schema is prepended (`schema.prefix`);
 * for every other database type the raw configured prefix is returned.
 */
export const getTablePrefix = () => {
	const prefix = config.getEnv('database.tablePrefix');
	const isPostgres = config.getEnv('database.type') === 'postgresdb';

	if (!isPostgres) return prefix;

	const schema = config.getEnv('database.postgresdb.schema');

	return `${schema}.${prefix}`;
};
/**
 * Escape a parametrized query for the active database driver.
 *
 * Delegates to TypeORM's `Driver.escapeQueryWithParameters`, which replaces
 * named `:param` placeholders in `query` with driver-specific placeholders
 * and collects the bound values.
 *
 * Fix: previously only the `pinData` and `id` properties of `params` were
 * forwarded, silently dropping any other supplied parameter. The whole
 * `params` object is now passed through; properties not referenced by the
 * query are ignored by the driver, so existing callers are unaffected.
 *
 * @param queryRunner - runner whose connection driver performs the escaping
 * @param query - SQL text containing named `:param` placeholders
 * @param params - values for the named placeholders
 * @returns a tuple of the escaped query string and its ordered bound values,
 * ready to be passed to `queryRunner.query()`
 */
export const escapeQuery = (
	queryRunner: QueryRunner,
	query: string,
	params: { [property: string]: unknown },
): [string, unknown[]] =>
	queryRunner.connection.driver.escapeQueryWithParameters(query, params, {});

View file

@ -0,0 +1,22 @@
import type { IDataObject, INodeExecutionData } from 'n8n-workflow';
// Shapes used while migrating workflow pinned data between formats.
export namespace PinData {
// Legacy pinned-data shape: plain data objects keyed by node name.
export type Old = { [nodeName: string]: IDataObject[] };
// Current pinned-data shape: full execution items (`{ json: ... }`) keyed by node name.
export type New = { [nodeName: string]: INodeExecutionData[] };
// Workflow row as fetched during migration; `pinData` may be a serialized
// JSON string or an already-parsed object, depending on the database driver.
export type FetchedWorkflow = { id: number; pinData: string | object };
}
/**
 * Type guard for a plain object literal — rules out `null` and arrays,
 * both of which `typeof` would also report as `'object'`.
 */
export function isObjectLiteral(maybeObject: unknown): maybeObject is { [key: string]: string } {
	const isNonNullObject = typeof maybeObject === 'object' && maybeObject !== null;

	return isNonNullObject && !Array.isArray(maybeObject);
}

/**
 * Type guard for an execution item in the `{ json: ... }` shape.
 * Requires only that a `json` key exist among the item's own enumerable
 * keys; additional keys are permitted.
 */
export function isJsonKeyObject(item: unknown): item is {
	json: unknown;
	[otherKeys: string]: unknown;
} {
	if (!isObjectLiteral(item)) return false;

	return Object.keys(item).some((key) => key === 'json');
}

View file

@ -1,4 +1,5 @@
import { ValueTransformer } from 'typeorm'; import { ValueTransformer } from 'typeorm';
import config from '../../../config';
export const idStringifier = { export const idStringifier = {
from: (value: number): string | number => (typeof value === 'number' ? value.toString() : value), from: (value: number): string | number => (typeof value === 'number' ? value.toString() : value),
@ -20,11 +21,14 @@ export const objectRetriever: ValueTransformer = {
}; };
/** /**
* Transformer to store object as string and retrieve string as object. * Transformer for sqlite JSON columns to mimic JSON-as-object behavior
* from Postgres and MySQL.
*/ */
export const serializer: ValueTransformer = { const jsonColumn: ValueTransformer = {
to: (value: object | string): string => to: (value: object): string | object =>
typeof value === 'object' ? JSON.stringify(value) : value, config.getEnv('database.type') === 'sqlite' ? JSON.stringify(value) : value,
from: (value: string | object): object => from: (value: string | object): object =>
typeof value === 'string' ? (JSON.parse(value) as object) : value, typeof value === 'string' ? (JSON.parse(value) as object) : value,
}; };
export const sqlite = { jsonColumn };

View file

@ -3,8 +3,9 @@ import express from 'express';
import * as utils from './shared/utils'; import * as utils from './shared/utils';
import * as testDb from './shared/testDb'; import * as testDb from './shared/testDb';
import { WorkflowEntity } from '../../src/databases/entities/WorkflowEntity'; import { WorkflowEntity } from '../../src/databases/entities/WorkflowEntity';
import type { Role } from '../../src/databases/entities/Role'; import type { Role } from '../../src/databases/entities/Role';
import { IPinData } from 'n8n-workflow'; import type { IPinData } from 'n8n-workflow';
jest.mock('../../src/telemetry'); jest.mock('../../src/telemetry');
@ -46,7 +47,7 @@ test('POST /workflows should store pin data for node in workflow', async () => {
const { pinData } = response.body.data as { pinData: IPinData }; const { pinData } = response.body.data as { pinData: IPinData };
expect(pinData).toMatchObject({ Spotify: [{ myKey: 'myValue' }] }); expect(pinData).toMatchObject(MOCK_PINDATA);
}); });
test('POST /workflows should set pin data to null if no pin data', async () => { test('POST /workflows should set pin data to null if no pin data', async () => {
@ -80,7 +81,7 @@ test('GET /workflows/:id should return pin data', async () => {
const { pinData } = workflowRetrievalResponse.body.data as { pinData: IPinData }; const { pinData } = workflowRetrievalResponse.body.data as { pinData: IPinData };
expect(pinData).toMatchObject({ Spotify: [{ myKey: 'myValue' }] }); expect(pinData).toMatchObject(MOCK_PINDATA);
}); });
function makeWorkflow({ withPinData }: { withPinData: boolean }) { function makeWorkflow({ withPinData }: { withPinData: boolean }) {
@ -101,8 +102,10 @@ function makeWorkflow({ withPinData }: { withPinData: boolean }) {
]; ];
if (withPinData) { if (withPinData) {
workflow.pinData = { Spotify: [{ myKey: 'myValue' }] }; workflow.pinData = MOCK_PINDATA;
} }
return workflow; return workflow;
} }
const MOCK_PINDATA = { Spotify: [{ json: { myKey: 'myValue' } }] };

View file

@ -933,14 +933,9 @@ export class WorkflowExecute {
const { pinData } = this.runExecutionData.resultData; const { pinData } = this.runExecutionData.resultData;
if (pinData && !executionNode.disabled && pinData[executionNode.name] !== undefined) { if (pinData && !executionNode.disabled && pinData[executionNode.name] !== undefined) {
let nodePinData = pinData[executionNode.name]; const nodePinData = pinData[executionNode.name];
if (!Array.isArray(nodePinData)) nodePinData = [nodePinData]; nodeSuccessData = [nodePinData]; // always zeroth runIndex
const itemsPerRun = nodePinData.map((item, index) => {
return { json: item, pairedItem: { item: index } };
});
nodeSuccessData = [itemsPerRun]; // always zeroth runIndex
} else { } else {
Logger.debug(`Running node "${executionNode.name}" started`, { Logger.debug(`Running node "${executionNode.name}" started`, {
node: executionNode.name, node: executionNode.name,

View file

@ -381,6 +381,7 @@ import { CodeEditor } from "@/components/forms";
import { dataPinningEventBus } from '../event-bus/data-pinning-event-bus'; import { dataPinningEventBus } from '../event-bus/data-pinning-event-bus';
import { stringSizeInBytes } from './helpers'; import { stringSizeInBytes } from './helpers';
import RunDataTable from './RunDataTable.vue'; import RunDataTable from './RunDataTable.vue';
import { isJsonKeyObject } from '@/utils';
// A path that does not exist so that nothing is selected by default // A path that does not exist so that nothing is selected by default
const deselectedPlaceholder = '_!^&*'; const deselectedPlaceholder = '_!^&*';
@ -631,13 +632,7 @@ export default mixins(
let inputData = this.rawInputData; let inputData = this.rawInputData;
if (this.node && this.pinData) { if (this.node && this.pinData) {
inputData = Array.isArray(this.pinData) inputData = this.pinData;
? this.pinData.map((value) => ({
json: value,
}))
: [{
json: this.pinData,
}];
} }
const offset = this.pageSize * (this.currentPage - 1); const offset = this.pageSize * (this.currentPage - 1);
@ -734,7 +729,10 @@ export default mixins(
localStorage.setItem(LOCAL_STORAGE_PIN_DATA_DISCOVERY_CANVAS_FLAG, 'true'); localStorage.setItem(LOCAL_STORAGE_PIN_DATA_DISCOVERY_CANVAS_FLAG, 'true');
}, },
enterEditMode({ origin }: EnterEditModeArgs) { enterEditMode({ origin }: EnterEditModeArgs) {
const inputData = this.pinData ? this.pinData : this.convertToJson(this.rawInputData); const inputData = this.pinData
? this.clearJsonKey(this.pinData)
: this.convertToJson(this.rawInputData);
const data = inputData.length > 0 const data = inputData.length > 0
? inputData ? inputData
: TEST_PIN_DATA; : TEST_PIN_DATA;
@ -773,25 +771,18 @@ export default mixins(
} }
this.$store.commit('ui/setOutputPanelEditModeEnabled', false); this.$store.commit('ui/setOutputPanelEditModeEnabled', false);
this.$store.commit('pinData', { node: this.node, data: this.removeJsonKeys(value) }); this.$store.commit('pinData', { node: this.node, data: this.clearJsonKey(value) });
this.onDataPinningSuccess({ source: 'save-edit' }); this.onDataPinningSuccess({ source: 'save-edit' });
this.onExitEditMode({ type: 'save' }); this.onExitEditMode({ type: 'save' });
}, },
removeJsonKeys(value: string) { clearJsonKey(userInput: string | object) {
const parsed = JSON.parse(value); const parsedUserInput = typeof userInput === 'string' ? JSON.parse(userInput) : userInput;
return Array.isArray(parsed) if (!Array.isArray(parsedUserInput)) return parsedUserInput;
? parsed.map(item => this.isJsonKeyObject(item) ? item.json : item)
: parsed;
},
isJsonKeyObject(item: unknown): item is { json: unknown } {
if (!this.isObjectLiteral(item)) return false;
const keys = Object.keys(item); return parsedUserInput.map(item => isJsonKeyObject(item) ? item.json : item);
return keys.length === 1 && keys[0] === 'json';
}, },
onExitEditMode({ type }: { type: 'save' | 'cancel' }) { onExitEditMode({ type }: { type: 'save' | 'cancel' }) {
this.$telemetry.track('User closed ndv edit state', { this.$telemetry.track('User closed ndv edit state', {
@ -1186,7 +1177,7 @@ export default mixins(
let selectedValue = this.selectedOutput.value; let selectedValue = this.selectedOutput.value;
if (isNotSelected) { if (isNotSelected) {
if (this.hasPinData) { if (this.hasPinData) {
selectedValue = this.pinData as object; selectedValue = this.clearJsonKey(this.pinData as object);
} else { } else {
selectedValue = this.convertToJson(this.getNodeInputData(this.node, this.runIndex, this.currentOutputIndex)); selectedValue = this.convertToJson(this.getNodeInputData(this.node, this.runIndex, this.currentOutputIndex));
} }

View file

@ -34,6 +34,7 @@ import { get } from 'lodash';
import mixins from 'vue-typed-mixins'; import mixins from 'vue-typed-mixins';
import { mapGetters } from 'vuex'; import { mapGetters } from 'vuex';
import { isObjectLiteral } from '@/utils';
export const nodeHelpers = mixins( export const nodeHelpers = mixins(
restApi, restApi,
@ -47,14 +48,10 @@ export const nodeHelpers = mixins(
return Object.keys(node.parameters).includes('nodeCredentialType'); return Object.keys(node.parameters).includes('nodeCredentialType');
}, },
isObjectLiteral(maybeObject: unknown): maybeObject is { [key: string]: string } {
return typeof maybeObject === 'object' && maybeObject !== null && !Array.isArray(maybeObject);
},
isCustomApiCallSelected (nodeValues: INodeParameters): boolean { isCustomApiCallSelected (nodeValues: INodeParameters): boolean {
const { parameters } = nodeValues; const { parameters } = nodeValues;
if (!this.isObjectLiteral(parameters)) return false; if (!isObjectLiteral(parameters)) return false;
return ( return (
parameters.resource !== undefined && parameters.resource.includes(CUSTOM_API_CALL_KEY) || parameters.resource !== undefined && parameters.resource.includes(CUSTOM_API_CALL_KEY) ||

View file

@ -11,6 +11,7 @@ import {
IConnections, IConnections,
IDataObject, IDataObject,
INodeConnections, INodeConnections,
INodeExecutionData,
INodeIssueData, INodeIssueData,
INodeTypeDescription, INodeTypeDescription,
IPinData, IPinData,
@ -48,6 +49,7 @@ import {stringSizeInBytes} from "@/components/helpers";
import {dataPinningEventBus} from "@/event-bus/data-pinning-event-bus"; import {dataPinningEventBus} from "@/event-bus/data-pinning-event-bus";
import communityNodes from './modules/communityNodes'; import communityNodes from './modules/communityNodes';
import { isCommunityPackageName } from './components/helpers'; import { isCommunityPackageName } from './components/helpers';
import { isJsonKeyObject } from './utils';
Vue.use(Vuex); Vue.use(Vuex);
@ -214,15 +216,21 @@ export const store = new Vuex.Store({
}, },
// Pin data // Pin data
pinData(state, payload: { node: INodeUi, data: IPinData[string] }) { pinData(state, payload: { node: INodeUi, data: INodeExecutionData[] }) {
if (!state.workflow.pinData) { if (!state.workflow.pinData) {
Vue.set(state.workflow, 'pinData', {}); Vue.set(state.workflow, 'pinData', {});
} }
Vue.set(state.workflow.pinData!, payload.node.name, payload.data); if (!Array.isArray(payload.data)) {
payload.data = [payload.data];
}
const storedPinData = payload.data.map(item => isJsonKeyObject(item) ? item : { json: item });
Vue.set(state.workflow.pinData!, payload.node.name, storedPinData);
state.stateIsDirty = true; state.stateIsDirty = true;
dataPinningEventBus.$emit('pin-data', { [payload.node.name]: payload.data }); dataPinningEventBus.$emit('pin-data', { [payload.node.name]: storedPinData });
}, },
unpinData(state, payload: { node: INodeUi }) { unpinData(state, payload: { node: INodeUi }) {
if (!state.workflow.pinData) { if (!state.workflow.pinData) {

View file

@ -1 +1,14 @@
export const omit = (keyToOmit: string, { [keyToOmit]: _, ...remainder }) => remainder; export const omit = (keyToOmit: string, { [keyToOmit]: _, ...remainder }) => remainder;
/**
 * Check whether a value is a plain object literal (not `null`, not an
 * array) — the cases `typeof x === 'object'` alone cannot distinguish.
 */
export function isObjectLiteral(maybeObject: unknown): maybeObject is { [key: string]: string } {
	if (maybeObject === null || Array.isArray(maybeObject)) return false;

	return typeof maybeObject === 'object';
}

/**
 * Check whether a value looks like an execution item, i.e. a plain object
 * carrying a `json` key (other keys may also be present).
 */
export function isJsonKeyObject(item: unknown): item is {
	json: unknown;
	[otherKeys: string]: unknown;
} {
	return isObjectLiteral(item) && Object.keys(item).indexOf('json') !== -1;
}

View file

@ -843,7 +843,7 @@ export interface INode {
} }
export interface IPinData { export interface IPinData {
[nodeName: string]: IDataObject[]; [nodeName: string]: INodeExecutionData[];
} }
export interface INodes { export interface INodes {