n8n/packages/nodes-base/nodes/Postgres/v2/helpers/utils.ts
import type { IDataObject, INode, INodeExecutionData, INodePropertyOptions } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';
import type {
ColumnInfo,
ConstructExecutionMetaData,
EnumInfo,
PgpClient,
PgpDatabase,
QueryMode,
QueryValues,
QueryWithValues,
SortRule,
WhereClause,
} from './interfaces';
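// Captures the contents of a curly-brace wrapped list, e.g. an enum/array literal such as `{a,b,c}`.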
const ENUM_VALUES_REGEX = /\{(.+?)\}/gm;
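/**
 * Wraps raw query results into n8n execution data (`{ json }` items). A single object
 * becomes a one-element array; an array is mapped element by element.
 */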
export function wrapData(data: IDataObject | IDataObject[]): INodeExecutionData[] {
if (!Array.isArray(data)) {
return [{ json: data }];
}
return data.map((item) => ({
json: item,
}));
}
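/**
 * Builds the error output item for the input item at `index`, keeping a copy of the
 * original JSON next to the error message and pairing the result with that item.
 */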
export function prepareErrorItem(
items: INodeExecutionData[],
error: IDataObject | NodeOperationError | Error,
index: number,
) {
return {
json: { message: error.message, item: { ...items[index].json }, error: { ...error } },
pairedItem: { item: index },
} as INodeExecutionData;
}
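/**
 * Translates a raw Postgres/driver error into a NodeOperationError. Syntax errors are
 * mapped back to the line of the failing query where possible, and common connection
 * errors (ECONNREFUSED, ENOTFOUND, ETIMEDOUT) get a readable message with the target
 * address moved into the description. Falls back to the failing query text as the
 * description when no other detail is available.
 */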
export function parsePostgresError(
node: INode,
error: any,
queries: QueryWithValues[],
itemIndex?: number,
) {
if (error.message.includes('syntax error at or near') && queries.length) {
try {
const snippet = error.message.match(/syntax error at or near "(.*)"/)[1] as string;
const failedQueryIndex = queries.findIndex((query) => query.query.includes(snippet));
if (failedQueryIndex !== -1) {
if (itemIndex === undefined) {
itemIndex = failedQueryIndex;
}
const failedQuery = queries[failedQueryIndex].query;
const lines = failedQuery.split('\n');
const lineIndex = lines.findIndex((line) => line.includes(snippet));
const errorMessage = `Syntax error at line ${lineIndex + 1} near "${snippet}"`;
error.message = errorMessage;
}
} catch {}
}
let message = error.message;
const errorDescription = error.description ? error.description : error.detail || error.hint;
let description = errorDescription;
if (!description && queries[itemIndex || 0]?.query) {
description = `Failed query: ${queries[itemIndex || 0].query}`;
}
if (error.message.includes('ECONNREFUSED')) {
message = 'Connection refused';
try {
description = error.message.split('ECONNREFUSED ')[1].trim();
} catch (e) {}
}
if (error.message.includes('ENOTFOUND')) {
message = 'Host not found';
try {
description = error.message.split('ENOTFOUND ')[1].trim();
} catch (e) {}
}
if (error.message.includes('ETIMEDOUT')) {
message = 'Connection timed out';
try {
description = error.message.split('ETIMEDOUT ')[1].trim();
} catch (e) {}
}
return new NodeOperationError(node, error as Error, {
message,
description,
itemIndex,
});
}
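/**
 * Appends a WHERE clause for each configured condition, combining them with AND unless
 * combineConditions is 'OR'. Columns are added as `$N:name` identifier placeholders and
 * values as `$N` placeholders; numeric comparison operators (>, <, >=, <=) require the
 * value to be a number and throw otherwise.
 *
 * A minimal sketch with hypothetical inputs:
 *   addWhereClauses(node, 0, 'SELECT * FROM $1:name.$2:name',
 *     [{ column: 'id', condition: 'equal', value: 5 }], ['public', 'users'], 'AND')
 *   // => ['SELECT * FROM $1:name.$2:name WHERE $3:name = $4', ['public', 'users', 'id', 5]]
 */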
export function addWhereClauses(
node: INode,
itemIndex: number,
query: string,
clauses: WhereClause[],
replacements: QueryValues,
combineConditions: string,
): [string, QueryValues] {
if (clauses.length === 0) return [query, replacements];
let combineWith = 'AND';
if (combineConditions === 'OR') {
combineWith = 'OR';
}
let replacementIndex = replacements.length + 1;
let whereQuery = ' WHERE';
const values: QueryValues = [];
clauses.forEach((clause, index) => {
if (clause.condition === 'equal') {
clause.condition = '=';
}
if (['>', '<', '>=', '<='].includes(clause.condition)) {
const value = Number(clause.value);
if (Number.isNaN(value)) {
throw new NodeOperationError(
node,
`Operator in entry ${index + 1} of 'Select Rows' works with numbers, but value ${
clause.value
} is not a number`,
{
itemIndex,
},
);
}
clause.value = value;
}
const columnReplacement = `$${replacementIndex}:name`;
values.push(clause.column);
replacementIndex = replacementIndex + 1;
let valueReplacement = '';
if (clause.condition !== 'IS NULL') {
valueReplacement = ` $${replacementIndex}`;
values.push(clause.value);
replacementIndex = replacementIndex + 1;
}
const operator = index === clauses.length - 1 ? '' : ` ${combineWith}`;
whereQuery += ` ${columnReplacement} ${clause.condition}${valueReplacement}${operator}`;
});
return [`${query}${whereQuery}`, replacements.concat(...values)];
}
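/**
 * Appends an ORDER BY clause for the given sort rules. Column names are added as
 * `$N:name` placeholders; the direction is whitelisted so that anything other than
 * 'DESC' falls back to ASC.
 */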
export function addSortRules(
query: string,
rules: SortRule[],
replacements: QueryValues,
): [string, QueryValues] {
if (rules.length === 0) return [query, replacements];
let replacementIndex = replacements.length + 1;
let orderByQuery = ' ORDER BY';
const values: string[] = [];
rules.forEach((rule, index) => {
const columnReplacement = `$${replacementIndex}:name`;
values.push(rule.column);
replacementIndex = replacementIndex + 1;
const endWith = index === rules.length - 1 ? '' : ',';
const sortDirection = rule.direction === 'DESC' ? 'DESC' : 'ASC';
orderByQuery += ` ${columnReplacement} ${sortDirection}${endWith}`;
});
return [`${query}${orderByQuery}`, replacements.concat(...values)];
}
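/**
 * Appends a RETURNING clause: `*` is inlined verbatim, otherwise the selected output
 * columns are appended as a single `$N:name` replacement value.
 */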
export function addReturning(
query: string,
outputColumns: string[],
replacements: QueryValues,
): [string, QueryValues] {
if (outputColumns.includes('*')) return [`${query} RETURNING *`, replacements];
const replacementIndex = replacements.length + 1;
return [`${query} RETURNING $${replacementIndex}:name`, [...replacements, outputColumns]];
}
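/**
 * Returns a query runner bound to the given node, pg-promise client and database.
 * `options.queryBatching` selects the execution strategy: 'single' (default) sends all
 * queries in one round trip via pgp.helpers.concat, 'transaction' runs them inside
 * db.tx and stops at the first failure, and 'independently' runs each query in its own
 * statement inside db.task and keeps going. Errors are translated with
 * parsePostgresError and either thrown or, when continueOnFail is set, returned as
 * error items.
 *
 * A minimal usage sketch (the surrounding caller context is assumed, not shown here):
 *   const runQueries = configureQueryRunner(node, constructExecutionMetaData, false, pgp, db);
 *   const returnData = await runQueries(queries, items, { queryBatching: 'transaction' });
 */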
export const configureQueryRunner =
(
node: INode,
constructExecutionMetaData: ConstructExecutionMetaData,
continueOnFail: boolean,
pgp: PgpClient,
db: PgpDatabase,
) =>
async (queries: QueryWithValues[], items: INodeExecutionData[], options: IDataObject) => {
let returnData: INodeExecutionData[] = [];
const queryBatching = (options.queryBatching as QueryMode) || 'single';
if (queryBatching === 'single') {
try {
returnData = (await db.multi(pgp.helpers.concat(queries)))
.map((result, i) => {
return constructExecutionMetaData(wrapData(result as IDataObject[]), {
itemData: { item: i },
});
})
.flat();
returnData = returnData.length ? returnData : [{ json: { success: true } }];
} catch (err) {
const error = parsePostgresError(node, err, queries);
if (!continueOnFail) throw error;
return [
{
json: {
message: error.message,
error: { ...error },
},
},
];
}
}
if (queryBatching === 'transaction') {
returnData = await db.tx(async (transaction) => {
const result: INodeExecutionData[] = [];
for (let i = 0; i < queries.length; i++) {
try {
const transactionResult: IDataObject[] = await transaction.any(
queries[i].query,
queries[i].values,
);
const executionData = constructExecutionMetaData(
wrapData(transactionResult.length ? transactionResult : [{ success: true }]),
{ itemData: { item: i } },
);
result.push(...executionData);
} catch (err) {
const error = parsePostgresError(node, err, queries, i);
if (!continueOnFail) throw error;
result.push(prepareErrorItem(items, error, i));
return result;
}
}
return result;
});
}
if (queryBatching === 'independently') {
returnData = await db.task(async (t) => {
const result: INodeExecutionData[] = [];
for (let i = 0; i < queries.length; i++) {
try {
const transactionResult: IDataObject[] = await t.any(
queries[i].query,
queries[i].values,
);
const executionData = constructExecutionMetaData(
wrapData(transactionResult.length ? transactionResult : [{ success: true }]),
{ itemData: { item: i } },
);
result.push(...executionData);
} catch (err) {
const error = parsePostgresError(node, err, queries, i);
if (!continueOnFail) throw error;
result.push(prepareErrorItem(items, error, i));
}
}
return result;
});
}
return returnData;
};
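/**
 * When `replace` is true, returns the items with every empty-string property of `json`
 * set to null. Note that only the item wrapper is shallow-copied; the underlying `json`
 * object is mutated in place.
 */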
export function replaceEmptyStringsByNulls(
items: INodeExecutionData[],
replace?: boolean,
): INodeExecutionData[] {
if (!replace) return items;
const returnData: INodeExecutionData[] = items.map((item) => {
const newItem = { ...item };
const keys = Object.keys(newItem.json);
for (const key of keys) {
if (newItem.json[key] === '') {
newItem.json[key] = null;
}
}
return newItem;
});
return returnData;
}
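/**
 * Collapses a list of { column, value } entries into a single object keyed by column,
 * e.g. [{ column: 'name', value: 'n8n' }] => { name: 'n8n' }.
 */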
export function prepareItem(values: IDataObject[]) {
const item = values.reduce((acc, { column, value }) => {
acc[column as string] = value;
return acc;
}, {} as IDataObject);
return item;
}
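/**
 * Reads column metadata (name, data type, nullability, underlying type and default)
 * for the given schema/table from information_schema.columns.
 */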
export async function getTableSchema(
db: PgpDatabase,
schema: string,
table: string,
): Promise<ColumnInfo[]> {
const columns = await db.any(
'SELECT column_name, data_type, is_nullable, udt_name, column_default FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2',
[schema, table],
);
return columns;
}
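/**
 * Returns the columns of `table` that belong to a primary key or unique index, using
 * pg_index/pg_attribute (see the PostgreSQL wiki query referenced below).
 */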
export async function uniqueColumns(db: PgpDatabase, table: string) {
// Using the modified query from https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
const unique = await db.any(
`
SELECT DISTINCT a.attname
FROM pg_index i JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey)
WHERE i.indrelid = quote_ident($1)::regclass
AND (i.indisprimary OR i.indisunique);
`,
[table],
);
return unique as IDataObject[];
}
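/**
 * Fetches every enum type name (typname) together with its labels (enumlabel) by
 * joining pg_type and pg_enum.
 */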
export async function getEnums(db: PgpDatabase): Promise<EnumInfo[]> {
const enumsData = await db.any(
'SELECT pg_type.typname, pg_enum.enumlabel FROM pg_type JOIN pg_enum ON pg_enum.enumtypid = pg_type.oid;',
);
return enumsData as EnumInfo[];
}
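/**
 * Filters the fetched enum info down to the labels of `enumName` and maps them to
 * `{ name, value }` options for an n8n dropdown.
 */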
export function getEnumValues(enumInfo: EnumInfo[], enumName: string): INodePropertyOptions[] {
return enumInfo.reduce((acc, current) => {
if (current.typname === enumName) {
acc.push({ name: current.enumlabel, value: current.enumlabel });
}
return acc;
}, [] as INodePropertyOptions[]);
}
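/**
 * Checks whether at least one row matches the given column/value pairs. `values` is a
 * flat array alternating column names and values; it is mapped to placeholder pairs
 * ($3:name=$4, $5:name=$6, ...) after the schema ($1) and table ($2).
 */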
export async function doesRowExist(
db: PgpDatabase,
schema: string,
table: string,
values: string[],
): Promise<boolean> {
const where = [];
for (let i = 3; i < 3 + values.length; i += 2) {
where.push(`$${i}:name=$${i + 1}`);
}
const exists = await db.any(
`SELECT EXISTS(SELECT 1 FROM $1:name.$2:name WHERE ${where.join(' AND ')})`,
[schema, table, ...values],
);
return exists[0].exists;
}
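/**
 * Validates an item against the fetched column info: throws a NodeOperationError if the
 * item references a column that is not in the table or sets a non-nullable column to
 * null. Returns the item unchanged; when no column info is available the check is skipped.
 */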
export function checkItemAgainstSchema(
node: INode,
item: IDataObject,
columnsInfo: ColumnInfo[],
index: number,
) {
if (columnsInfo.length === 0) return item;
const schema = columnsInfo.reduce((acc, { column_name, data_type, is_nullable }) => {
acc[column_name] = { type: data_type.toUpperCase(), nullable: is_nullable === 'YES' };
return acc;
}, {} as IDataObject);
for (const key of Object.keys(item)) {
if (schema[key] === undefined) {
throw new NodeOperationError(node, `Column '${key}' does not exist in the selected table`, {
itemIndex: index,
});
}
if (item[key] === null && !(schema[key] as IDataObject)?.nullable) {
throw new NodeOperationError(node, `Column '${key}' is not nullable`, {
itemIndex: index,
});
}
}
return item;
}