Mirror of https://github.com/n8n-io/n8n.git, synced 2024-12-24 04:04:06 -08:00
fix(core): Fix migrations on non-public Postgres schema (#3356)
* 🐛 Fix UM migration
* ⚡ Account for schema in `search_path`
* 🔥 Remove unneeded schema refs
* 🧪 Account for alt schema in DB testing
* ⚡ Add schema to `IncreaseTypeVarcharLimit`
* ⚡ Set `search_path` in every migration
* ⚡ Set `search_path` in down migrations
This commit is contained in:
parent 56c07a45d5
commit b49d493653
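In outline, every Postgres migration touched below now follows the same two-step pattern: qualify the table prefix with the configured schema, then set `search_path` so unqualified identifiers (index names, sequences) also resolve in that schema. A minimal sketch of the pattern, with a placeholder class name, table, and import path rather than a real migration from this commit:

import { MigrationInterface, QueryRunner } from 'typeorm';

import config from '../../../../config'; // illustrative import path, not taken from this commit

// Placeholder migration showing the pattern applied throughout this commit.
export class ExampleSchemaAwareMigration0000000000000 implements MigrationInterface {
	name = 'ExampleSchemaAwareMigration0000000000000';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			// Qualify generated table names with the configured schema.
			tablePrefix = schema + '.' + tablePrefix;
		}

		// Make unqualified names resolve in that schema as well.
		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}example ADD COLUMN "note" text`);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`SET search_path TO ${schema};`);

		await queryRunner.query(`ALTER TABLE ${tablePrefix}example DROP COLUMN "note"`);
	}
}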
@@ -32,9 +32,10 @@
 "start:default": "cd bin && ./n8n",
 "start:windows": "cd bin && n8n",
 "test": "npm run test:sqlite",
-"test:sqlite": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=sqlite; jest",
-"test:postgres": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=postgresdb && jest",
-"test:mysql": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=mysqldb && jest",
+"test:sqlite": "export N8N_LOG_LEVEL=silent; export DB_TYPE=sqlite; jest",
+"test:postgres": "export N8N_LOG_LEVEL=silent; export DB_TYPE=postgresdb; jest",
+"test:postgres:alt-schema": "export DB_POSTGRESDB_SCHEMA=alt_schema; npm run test:postgres",
+"test:mysql": "export N8N_LOG_LEVEL=silent; export DB_TYPE=mysqldb; jest",
 "watch": "tsc --watch",
 "typeorm": "ts-node ../../node_modules/typeorm/cli.js"
 },
@@ -14,6 +14,8 @@ export class InitialMigration1587669153312 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_${tablePrefixIndex}814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`, undefined);
 await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixIndex}07fde106c0b471d8cc80a64fc8 ON ${tablePrefix}credentials_entity (type) `, undefined);
 await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_${tablePrefixIndex}e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`, undefined);

@@ -29,6 +31,8 @@ export class InitialMigration1587669153312 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`DROP TABLE ${tablePrefix}workflow_entity`, undefined);
 await queryRunner.query(`DROP INDEX IDX_${tablePrefixIndex}c4d999a5e90784e8caccf5589d`, undefined);
 await queryRunner.query(`DROP TABLE ${tablePrefix}execution_entity`, undefined);
@@ -16,6 +16,8 @@ export class WebhookModel1589476000887 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);
 }
 

@@ -25,6 +27,7 @@ export class WebhookModel1589476000887 implements MigrationInterface {
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+await queryRunner.query(`SET search_path TO ${schema};`);
 await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`, undefined);
 }
 
@@ -13,13 +13,21 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}33228da131bb1112247cf52a42 ON ${tablePrefix}execution_entity ("stoppedAt") `);
 }
 
 async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.getEnv('database.tablePrefix');
-
-await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
+let tablePrefix = config.getEnv('database.tablePrefix');
+const tablePrefixPure = tablePrefix;
+const schema = config.getEnv('database.postgresdb.schema');
+if (schema) {
+tablePrefix = schema + '.' + tablePrefix;
+}
+await queryRunner.query(`SET search_path TO ${schema};`);
+await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}33228da131bb1112247cf52a42`);
 }
 
 }
@@ -11,6 +11,9 @@ export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query('ALTER TABLE ' + tablePrefix + 'execution_entity ALTER COLUMN "stoppedAt" DROP NOT NULL', undefined);
 }
 
@@ -12,6 +12,8 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "webhookId" character varying`);
 await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "pathLength" integer`);
 await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `);

@@ -24,6 +26,7 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+await queryRunner.query(`SET search_path TO ${schema};`);
 
 await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240`);
 await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "pathLength"`);
@@ -12,6 +12,8 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 // create tags table + relationship with workflow entity
 
 await queryRunner.query(`CREATE TABLE ${tablePrefix}tag_entity ("id" SERIAL NOT NULL, "name" character varying(24) NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT "PK_${tablePrefixPure}7a50a9b74ae6855c0dcaee25052" PRIMARY KEY ("id"))`);

@@ -47,6 +49,8 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 // `createdAt` and `updatedAt`
 
 await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity ALTER COLUMN "updatedAt" DROP DEFAULT`);
@@ -12,6 +12,8 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 const workflowNames = await queryRunner.query(`
 SELECT name
 FROM ${tablePrefix}workflow_entity

@@ -65,6 +67,8 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`DROP INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab"`);
 }
 }
@@ -12,6 +12,8 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`ALTER TABLE ${tablePrefix}execution_entity ADD "waitTill" TIMESTAMP`);
 await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2 ON ${tablePrefix}execution_entity ("waitTill")`);
 }

@@ -24,6 +26,8 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`);
 await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "waitTill"`);
 }
@@ -14,6 +14,9 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 const helpers = new MigrationHelpers(queryRunner);
 
 const credentialsEntities = await queryRunner.query(`

@@ -157,6 +160,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+await queryRunner.query(`SET search_path TO ${schema};`);
 const helpers = new MigrationHelpers(queryRunner);
 
 const credentialsEntities = await queryRunner.query(`
@@ -13,6 +13,8 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterfac
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(
 `DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
 );

@@ -49,22 +51,22 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterfac
 }
 
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3"`,
+`DROP INDEX "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3"`,
 );
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}85b981df7b444f905f8bf50747"`,
+`DROP INDEX "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747"`,
 );
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662"`,
+`DROP INDEX "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662"`,
 );
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}4f474ac92be81610439aaad61e"`,
+`DROP INDEX "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e"`,
 );
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}58154df94c686818c99fb754ce"`,
+`DROP INDEX "IDX_${tablePrefixPure}58154df94c686818c99fb754ce"`,
 );
 await queryRunner.query(
-`DROP INDEX "${schema}"."IDX_${tablePrefixPure}33228da131bb1112247cf52a42"`,
+`DROP INDEX "IDX_${tablePrefixPure}33228da131bb1112247cf52a42"`,
 );
 await queryRunner.query(
 `CREATE INDEX "IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2" ON ${tablePrefix}execution_entity ("waitTill") `,
@@ -9,7 +9,14 @@ export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface
 name = 'IncreaseTypeVarcharLimit1646834195327';
 
 async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.getEnv('database.tablePrefix');
+let tablePrefix = config.getEnv('database.tablePrefix');
+const schema = config.getEnv('database.postgresdb.schema');
+if (schema) {
+tablePrefix = schema + '.' + tablePrefix;
+}
+
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
 }
 
@@ -14,6 +14,8 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(
 `CREATE TABLE ${tablePrefix}role (
 "id" serial NOT NULL,

@@ -56,12 +58,12 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
 CONSTRAINT "FK_${tablePrefixPure}3540da03964527aa24ae014b780" FOREIGN KEY ("roleId") REFERENCES ${tablePrefix}role ("id") ON DELETE NO ACTION ON UPDATE NO ACTION,
 CONSTRAINT "FK_${tablePrefixPure}82b2fd9ec4e3e24209af8160282" FOREIGN KEY ("userId") REFERENCES ${tablePrefix}user ("id") ON DELETE CASCADE ON UPDATE NO ACTION,
 CONSTRAINT "FK_${tablePrefixPure}b83f8d2530884b66a9c848c8b88" FOREIGN KEY ("workflowId") REFERENCES
-${tablePrefixPure}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION
+${tablePrefix}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION
 );`,
 );
 
 await queryRunner.query(
-`CREATE INDEX "IDX_${tablePrefixPure}65a0933c0f19d278881653bf81d35064" ON "shared_workflow" ("workflowId");`,
+`CREATE INDEX "IDX_${tablePrefixPure}65a0933c0f19d278881653bf81d35064" ON ${tablePrefix}shared_workflow ("workflowId");`,
 );
 
 await queryRunner.query(

@@ -146,6 +148,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+await queryRunner.query(`SET search_path TO ${schema};`);
 
 await queryRunner.query(
 `CREATE UNIQUE INDEX "IDX_${tablePrefixPure}a252c527c4c89237221fe2c0ab" ON ${tablePrefix}workflow_entity ("name")`,
@@ -11,6 +11,8 @@ export class LowerCaseUserEmail1648740597343 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`
 UPDATE ${tablePrefix}user
 SET email = LOWER(email);
@@ -11,6 +11,8 @@ export class AddUserSettings1652367743993 implements MigrationInterface {
 tablePrefix = schema + '.' + tablePrefix;
 }
 
+await queryRunner.query(`SET search_path TO ${schema};`);
+
 await queryRunner.query(`ALTER TABLE ${tablePrefix}user ADD COLUMN settings json`);
 
 await queryRunner.query(

@@ -24,6 +26,7 @@ export class AddUserSettings1652367743993 implements MigrationInterface {
 if (schema) {
 tablePrefix = schema + '.' + tablePrefix;
 }
+await queryRunner.query(`SET search_path TO ${schema};`);
 
 await queryRunner.query(`ALTER TABLE ${tablePrefix}user DROP COLUMN settings`);
 }
@@ -1,3 +1,6 @@
+import { exec as callbackExec } from 'child_process';
+import { promisify } from 'util';
+
 import { createConnection, getConnection, ConnectionOptions, Connection } from 'typeorm';
 import { Credentials, UserSettings } from 'n8n-core';
 

@@ -12,12 +15,14 @@ import { entities } from '../../../src/databases/entities';
 import { mysqlMigrations } from '../../../src/databases/mysqldb/migrations';
 import { postgresMigrations } from '../../../src/databases/postgresdb/migrations';
 import { sqliteMigrations } from '../../../src/databases/sqlite/migrations';
-import { categorize } from './utils';
+import { categorize, getPostgresSchemaSection } from './utils';
 
 import type { Role } from '../../../src/databases/entities/Role';
 import type { User } from '../../../src/databases/entities/User';
 import type { CollectionName, CredentialPayload } from './types';
 
+const exec = promisify(callbackExec);
+
 /**
 * Initialize one test DB per suite run, with bootstrap connection if needed.
 */
@@ -35,21 +40,42 @@ export async function init() {
 
 if (dbType === 'postgresdb') {
 let bootstrapPostgres;
-const bootstrapPostgresOptions = getBootstrapPostgresOptions();
+const pgOptions = getBootstrapPostgresOptions();
 
 try {
-bootstrapPostgres = await createConnection(bootstrapPostgresOptions);
+bootstrapPostgres = await createConnection(pgOptions);
 } catch (error) {
-const { username, password, host, port, schema } = bootstrapPostgresOptions;
-console.error(
-`ERROR: Failed to connect to Postgres default DB 'postgres'.\nPlease review your Postgres connection options:\n\thost: "${host}"\n\tusername: "${username}"\n\tpassword: "${password}"\n\tport: "${port}"\n\tschema: "${schema}"\nFix by setting correct values via environment variables:\n\texport DB_POSTGRESDB_HOST=value\n\texport DB_POSTGRESDB_USER=value\n\texport DB_POSTGRESDB_PASSWORD=value\n\texport DB_POSTGRESDB_PORT=value\n\texport DB_POSTGRESDB_SCHEMA=value`,
-);
+const pgConfig = getPostgresSchemaSection();
+
+if (!pgConfig) throw new Error("Failed to find config schema section for 'postgresdb'");
+
+const message = [
+"ERROR: Failed to connect to Postgres default DB 'postgres'",
+'Please review your Postgres connection options:',
+`host: ${pgOptions.host} | port: ${pgOptions.port} | schema: ${pgOptions.schema} | username: ${pgOptions.username} | password: ${pgOptions.password}`,
+'Fix by setting correct values via environment variables:',
+`${pgConfig.host.env} | ${pgConfig.port.env} | ${pgConfig.schema.env} | ${pgConfig.user.env} | ${pgConfig.password.env}`,
+'Otherwise, make sure your Postgres server is running.'
+].join('\n');
+
+console.error(message);
+
 process.exit(1);
 }
 
 const testDbName = `pg_${randomString(6, 10)}_${Date.now()}_n8n_test`;
 await bootstrapPostgres.query(`CREATE DATABASE ${testDbName};`);
 
+try {
+const schema = config.getEnv('database.postgresdb.schema');
+await exec(`psql -d ${testDbName} -c "CREATE SCHEMA IF NOT EXISTS ${schema}";`);
+} catch (error) {
+if (error instanceof Error && error.message.includes('command not found')) {
+console.error('psql command not found. Make sure psql is installed and added to your PATH.');
+}
+process.exit(1);
+}
+
 await Db.init(getPostgresOptions({ name: testDbName }));
 
 return { testDbName };
@@ -116,8 +142,10 @@ export async function truncate(collections: CollectionName[], testDbName: string
 if (dbType === 'postgresdb') {
 return Promise.all(
 collections.map((collection) => {
-const tableName = toTableName(collection);
-testDb.query(`TRUNCATE TABLE "${tableName}" RESTART IDENTITY CASCADE;`);
+const schema = config.getEnv('database.postgresdb.schema');
+const fullTableName = `${schema}.${toTableName(collection)}`;
+
+testDb.query(`TRUNCATE TABLE ${fullTableName} RESTART IDENTITY CASCADE;`);
 }),
 );
 }
@@ -28,3 +28,7 @@ export type SaveCredentialFunction = (
 credentialPayload: CredentialPayload,
 { user }: { user: User },
 ) => Promise<CredentialsEntity & ICredentialsDb>;
+
+export type PostgresSchemaSection = {
+[K in 'host' | 'port' | 'schema' | 'user' | 'password']: { env: string };
+};
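For orientation, a value of this mapped type would look roughly like the sketch below; the env names are the ones listed in the old testDb.ts error message earlier in this diff, so treat the exact mapping as an assumption rather than something this commit defines:

// Hypothetical example value; the field-to-env mapping is assumed from the error message above.
const exampleSection: PostgresSchemaSection = {
	host: { env: 'DB_POSTGRESDB_HOST' },
	port: { env: 'DB_POSTGRESDB_PORT' },
	schema: { env: 'DB_POSTGRESDB_SCHEMA' },
	user: { env: 'DB_POSTGRESDB_USER' },
	password: { env: 'DB_POSTGRESDB_PASSWORD' },
};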
@@ -24,7 +24,7 @@ import { issueJWT } from '../../../src/UserManagement/auth/jwt';
 import { getLogger } from '../../../src/Logger';
 import { credentialsController } from '../../../src/api/credentials.api';
 import type { User } from '../../../src/databases/entities/User';
-import type { EndpointGroup, SmtpTestAccount } from './types';
+import type { EndpointGroup, PostgresSchemaSection, SmtpTestAccount } from './types';
 import type { N8nApp } from '../../../src/UserManagement/Interfaces';
 import * as UserManagementMailer from '../../../src/UserManagement/email/UserManagementMailer';
 

@@ -262,3 +262,15 @@ export const categorize = <T>(arr: T[], test: (str: T) => boolean) => {
 { pass: [], fail: [] },
 );
 };
+
+export function getPostgresSchemaSection(
+schema = config.getSchema(),
+): PostgresSchemaSection | null {
+for (const [key, value] of Object.entries(schema)) {
+if (key === 'postgresdb') {
+return value._cvtProperties;
+}
+}
+
+return null;
+}
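A small usage sketch for the new helper (not part of this commit): it walks the convict config schema and returns the 'postgresdb' section, which testDb.ts above uses to tell developers which environment variables to set when the bootstrap connection fails:

// Hypothetical caller; getPostgresSchemaSection is the function added in the hunk above.
const pgConfig = getPostgresSchemaSection();

if (pgConfig) {
	console.log(
		`Configure the test database via ${pgConfig.host.env}, ${pgConfig.port.env}, ${pgConfig.schema.env}, ${pgConfig.user.env} and ${pgConfig.password.env}.`,
	);
}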