mirror of
https://github.com/n8n-io/n8n.git
synced 2024-12-24 20:24:05 -08:00
fix: Fix issue with long credential type names (#2961)
* 🐛 Fix issue when saving long credential type names The column type in the credentials entity was varchar(32), but nodes like Google Cloud Natural Language exceed that length. The issue is only found when using Postgres. MySQL works fine, as there the column type has the proper length. Probably a migration at some point did not properly update the column. https://community.n8n.io/t/google-cloud-natural-language-credentials-error-too-long-value/12003/4 * 👕 Fix lint issue * ⚡ Improvement
This commit is contained in:
parent
2d8ac4b477
commit
535dfe0838
|
@ -37,7 +37,7 @@ function getTimestampSyntax() {
|
||||||
const dbType = config.get('database.type') as DatabaseType;
|
const dbType = config.get('database.type') as DatabaseType;
|
||||||
|
|
||||||
const map: { [key in DatabaseType]: string } = {
|
const map: { [key in DatabaseType]: string } = {
|
||||||
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
|
sqlite: `STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')`,
|
||||||
postgresdb: 'CURRENT_TIMESTAMP(3)',
|
postgresdb: 'CURRENT_TIMESTAMP(3)',
|
||||||
mysqldb: 'CURRENT_TIMESTAMP(3)',
|
mysqldb: 'CURRENT_TIMESTAMP(3)',
|
||||||
mariadb: 'CURRENT_TIMESTAMP(3)',
|
mariadb: 'CURRENT_TIMESTAMP(3)',
|
||||||
|
@ -61,7 +61,7 @@ export class CredentialsEntity implements ICredentialsDb {
|
||||||
|
|
||||||
@Index()
|
@Index()
|
||||||
@Column({
|
@Column({
|
||||||
length: 32,
|
length: 128,
|
||||||
})
|
})
|
||||||
type: string;
|
type: string;
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,17 @@
|
||||||
|
import {
|
||||||
|
MigrationInterface,
|
||||||
|
QueryRunner,
|
||||||
|
} from 'typeorm';
|
||||||
|
|
||||||
|
import config = require('../../../../config');
|
||||||
|
|
||||||
|
export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface {
|
||||||
|
name = 'IncreaseTypeVarcharLimit1646834195327';
|
||||||
|
|
||||||
|
async up(queryRunner: QueryRunner): Promise<void> {
|
||||||
|
const tablePrefix = config.get('database.tablePrefix');
|
||||||
|
await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async down(queryRunner: QueryRunner): Promise<void> {}
|
||||||
|
}
|
|
@ -8,6 +8,7 @@ import { UniqueWorkflowNames1620824779533 } from './1620824779533-UniqueWorkflow
|
||||||
import { AddwaitTill1626176912946 } from './1626176912946-AddwaitTill';
|
import { AddwaitTill1626176912946 } from './1626176912946-AddwaitTill';
|
||||||
import { UpdateWorkflowCredentials1630419189837 } from './1630419189837-UpdateWorkflowCredentials';
|
import { UpdateWorkflowCredentials1630419189837 } from './1630419189837-UpdateWorkflowCredentials';
|
||||||
import { AddExecutionEntityIndexes1644422880309 } from './1644422880309-AddExecutionEntityIndexes';
|
import { AddExecutionEntityIndexes1644422880309 } from './1644422880309-AddExecutionEntityIndexes';
|
||||||
|
import { IncreaseTypeVarcharLimit1646834195327 } from './1646834195327-IncreaseTypeVarcharLimit';
|
||||||
|
|
||||||
export const postgresMigrations = [
|
export const postgresMigrations = [
|
||||||
InitialMigration1587669153312,
|
InitialMigration1587669153312,
|
||||||
|
@ -20,4 +21,5 @@ export const postgresMigrations = [
|
||||||
AddwaitTill1626176912946,
|
AddwaitTill1626176912946,
|
||||||
UpdateWorkflowCredentials1630419189837,
|
UpdateWorkflowCredentials1630419189837,
|
||||||
AddExecutionEntityIndexes1644422880309,
|
AddExecutionEntityIndexes1644422880309,
|
||||||
|
IncreaseTypeVarcharLimit1646834195327,
|
||||||
];
|
];
|
||||||
|
|
|
@ -11,7 +11,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
|
||||||
async up(queryRunner: QueryRunner): Promise<void> {
|
async up(queryRunner: QueryRunner): Promise<void> {
|
||||||
const tablePrefix = config.get('database.tablePrefix');
|
const tablePrefix = config.get('database.tablePrefix');
|
||||||
|
|
||||||
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`, undefined);
|
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`, undefined);
|
||||||
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8" ON "${tablePrefix}credentials_entity" ("type") `, undefined);
|
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8" ON "${tablePrefix}credentials_entity" ("type") `, undefined);
|
||||||
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime NOT NULL, "workflowData" text NOT NULL, "workflowId" varchar)`, undefined);
|
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime NOT NULL, "workflowData" text NOT NULL, "workflowId" varchar)`, undefined);
|
||||||
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d" ON "${tablePrefix}execution_entity" ("workflowId") `, undefined);
|
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d" ON "${tablePrefix}execution_entity" ("workflowId") `, undefined);
|
||||||
|
|
Loading…
Reference in a new issue