mirror of https://github.com/n8n-io/n8n.git (synced 2025-01-11 04:47:29 -08:00)

refactor(core): Make Logger a service (no-changelog) (#7494)

parent db4e61ba24
commit 05586a900d
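The diff applies one pattern throughout: instead of logging through the static `LoggerProxy` exported by `n8n-workflow` (or through the old `getLogger()` singleton), `Logger` becomes a typedi `@Service()` that is injected through class constructors, or resolved with `Container.get(Logger)` where a class or function is not constructed by the container. A minimal sketch of the pattern follows, with a hypothetical `ExampleRunner` consumer and a stripped-down `Logger` standing in for the real winston-backed implementation; it assumes typedi with decorator support enabled, as in the n8n tsconfig.

import 'reflect-metadata';
import { Container, Service } from 'typedi';

// Stand-in for the real winston-backed Logger service.
@Service()
class Logger {
	info(message: string, meta: object = {}) {
		console.log('info', message, meta);
	}
}

// Before: static proxy, usable from anywhere.
// import { LoggerProxy as Logger } from 'n8n-workflow';
// Logger.info('Started workflow');

// After: the logger is injected through the constructor (ExampleRunner is a
// hypothetical consumer, not a class from this commit).
@Service()
class ExampleRunner {
	constructor(private readonly logger: Logger) {}

	run() {
		this.logger.info('Started workflow');
	}
}

// Where constructor injection is not available (plain functions, classes built
// outside the container), the service is resolved manually:
Container.get(Logger).info('Last session crashed');
Container.get(ExampleRunner).run();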
@@ -4,7 +4,6 @@ import type { Server } from 'http';
 import express from 'express';
 import compression from 'compression';
 import isbot from 'isbot';
-import { LoggerProxy as Logger } from 'n8n-workflow';
 
 import config from '@/config';
 import { N8N_VERSION, inDevelopment, inTest } from '@/constants';
@@ -19,8 +18,11 @@ import { TestWebhooks } from '@/TestWebhooks';
 import { WaitingWebhooks } from '@/WaitingWebhooks';
 import { webhookRequestHandler } from '@/WebhookHelpers';
 import { generateHostInstanceId } from './databases/utils/generators';
+import { Logger } from '@/Logger';
 
 export abstract class AbstractServer {
+protected logger: Logger;
+
 protected server: Server;
 
 readonly app: express.Application;
@@ -67,6 +69,8 @@ export abstract class AbstractServer {
 this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
 
 this.uniqueInstanceId = generateHostInstanceId(instanceType);
+
+this.logger = Container.get(Logger);
 }
 
 async configure(): Promise<void> {
@@ -194,7 +198,7 @@ export abstract class AbstractServer {
 this.app.use((req, res, next) => {
 const userAgent = req.headers['user-agent'];
 if (userAgent && checkIfBot(userAgent)) {
-Logger.info(`Blocked ${req.method} ${req.url} for "${userAgent}"`);
+this.logger.info(`Blocked ${req.method} ${req.url} for "${userAgent}"`);
 res.status(204).end();
 } else next();
 });
@@ -7,7 +7,7 @@ import type {
 IRun,
 ExecutionStatus,
 } from 'n8n-workflow';
-import { createDeferredPromise, LoggerProxy } from 'n8n-workflow';
+import { createDeferredPromise } from 'n8n-workflow';
 
 import type { ChildProcess } from 'child_process';
 import type PCancelable from 'p-cancelable';
@@ -20,6 +20,7 @@ import type {
 } from '@/Interfaces';
 import { isWorkflowIdValid } from '@/utils';
 import { ExecutionRepository } from '@db/repositories';
+import { Logger } from '@/Logger';
 
 @Service()
 export class ActiveExecutions {
@@ -27,6 +28,8 @@ export class ActiveExecutions {
 [index: string]: IExecutingWorkflowData;
 } = {};
 
+constructor(private readonly logger: Logger) {}
+
 /**
 * Add a new active execution
 */
@@ -225,7 +228,7 @@ export class ActiveExecutions {
 
 async setStatus(executionId: string, status: ExecutionStatus): Promise<void> {
 if (this.activeExecutions[executionId] === undefined) {
-LoggerProxy.debug(
+this.logger.debug(
 `There is no active execution with id "${executionId}", can't update status to ${status}.`,
 );
 return;
@@ -27,7 +27,6 @@ import {
 NodeHelpers,
 Workflow,
 WorkflowActivationError,
-LoggerProxy as Logger,
 ErrorReporterProxy as ErrorReporter,
 WebhookPathAlreadyTakenError,
 } from 'n8n-workflow';
@@ -66,6 +65,7 @@ import { WorkflowsService } from './workflows/workflows.services';
 import { webhookNotFoundErrorMessage } from './utils';
 import { In } from 'typeorm';
 import { WebhookService } from './services/webhook.service';
+import { Logger } from './Logger';
 
 const WEBHOOK_PROD_UNREGISTERED_HINT =
 "The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)";
@@ -83,10 +83,11 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 } = {};
 
 constructor(
-private activeExecutions: ActiveExecutions,
-private externalHooks: ExternalHooks,
-private nodeTypes: NodeTypes,
-private webhookService: WebhookService,
+private readonly logger: Logger,
+private readonly activeExecutions: ActiveExecutions,
+private readonly externalHooks: ExternalHooks,
+private readonly nodeTypes: NodeTypes,
+private readonly webhookService: WebhookService,
 ) {}
 
 async init() {
@@ -113,31 +114,31 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 }
 
 if (workflowsData.length !== 0) {
-Logger.info(' ================================');
-Logger.info(' Start Active Workflows:');
-Logger.info(' ================================');
+this.logger.info(' ================================');
+this.logger.info(' Start Active Workflows:');
+this.logger.info(' ================================');
 
 for (const workflowData of workflowsData) {
-Logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
-Logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {
+this.logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
+this.logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {
 workflowName: workflowData.name,
 workflowId: workflowData.id,
 });
 try {
 await this.add(workflowData.id, 'init', workflowData);
-Logger.verbose(`Successfully started workflow "${workflowData.name}"`, {
+this.logger.verbose(`Successfully started workflow "${workflowData.name}"`, {
 workflowName: workflowData.name,
 workflowId: workflowData.id,
 });
-Logger.info(' => Started');
+this.logger.info(' => Started');
 } catch (error) {
 ErrorReporter.error(error);
-Logger.info(
+this.logger.info(
 ' => ERROR: Workflow could not be activated on first try, keep on trying if not an auth issue',
 );
 
-Logger.info(` ${error.message}`);
-Logger.error(
+this.logger.info(` ${error.message}`);
+this.logger.error(
 `Issue on initial workflow activation try "${workflowData.name}" (startup)`,
 {
 workflowName: workflowData.name,
@@ -153,7 +154,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 }
 }
 }
-Logger.verbose('Finished initializing active workflows (startup)');
+this.logger.verbose('Finished initializing active workflows (startup)');
 }
 
 await this.externalHooks.run('activeWorkflows.initialized', []);
@@ -165,7 +166,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 */
 async removeAll(): Promise<void> {
 let activeWorkflowIds: string[] = [];
-Logger.verbose('Call to remove all active workflows received (removeAll)');
+this.logger.verbose('Call to remove all active workflows received (removeAll)');
 
 activeWorkflowIds.push(...this.activeWorkflows.allActiveWorkflows());
 
@@ -192,7 +193,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 const httpMethod = request.method;
 let path = request.params.path;
 
-Logger.debug(`Received webhook "${httpMethod}" for path "${path}"`);
+this.logger.debug(`Received webhook "${httpMethod}" for path "${path}"`);
 
 // Reset request parameters
 request.params = {} as WebhookRequest['params'];
@@ -421,7 +422,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 await this.removeWorkflowWebhooks(workflow.id);
 } catch (error1) {
 ErrorReporter.error(error1);
-Logger.error(
+this.logger.error(
 `Could not remove webhooks of workflow "${workflow.id}" because of error: "${error1.message}"`,
 );
 }
@@ -558,7 +559,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
 donePromise?: IDeferredPromise<IRun | undefined>,
 ): void => {
-Logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
+this.logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
 void WorkflowsService.saveStaticData(workflow);
 const executePromise = this.runWorkflow(
 workflowData,
@@ -577,7 +578,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 .catch(donePromise.reject);
 });
 } else {
-void executePromise.catch(Logger.error);
+void executePromise.catch((error: Error) => this.logger.error(error.message, { error }));
 }
 };
 
@@ -614,7 +615,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
 donePromise?: IDeferredPromise<IRun | undefined>,
 ): void => {
-Logger.debug(`Received trigger for workflow "${workflow.name}"`);
+this.logger.debug(`Received trigger for workflow "${workflow.name}"`);
 void WorkflowsService.saveStaticData(workflow);
 
 const executePromise = this.runWorkflow(
@@ -634,11 +635,11 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 .catch(donePromise.reject);
 });
 } else {
-executePromise.catch(Logger.error);
+executePromise.catch((error: Error) => this.logger.error(error.message, { error }));
 }
 };
 returnFunctions.emitError = (error: Error): void => {
-Logger.info(
+this.logger.info(
 `The trigger node "${node.name}" of workflow "${workflowData.name}" failed with the error: "${error.message}". Will try to reactivate.`,
 {
 nodeName: node.name,
@@ -728,7 +729,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 
 const canBeActivated = workflowInstance.checkIfWorkflowCanBeActivated(STARTING_NODES);
 if (!canBeActivated) {
-Logger.error(`Unable to activate workflow "${workflowData.name}"`);
+this.logger.error(`Unable to activate workflow "${workflowData.name}"`);
 throw new Error(
 'The workflow can not be activated because it does not contain any nodes which could start the workflow. Only workflows which have trigger or webhook nodes can be activated.',
 );
@@ -771,7 +772,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 getTriggerFunctions,
 getPollFunctions,
 );
-Logger.verbose(`Successfully activated workflow "${workflowData.name}"`, {
+this.logger.verbose(`Successfully activated workflow "${workflowData.name}"`, {
 workflowId,
 workflowName: workflowData.name,
 });
@@ -828,7 +829,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 const workflowName = workflowData.name;
 
 const retryFunction = async () => {
-Logger.info(`Try to activate workflow "${workflowName}" (${workflowId})`, {
+this.logger.info(`Try to activate workflow "${workflowName}" (${workflowId})`, {
 workflowId,
 workflowName,
 });
@@ -841,7 +842,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 lastTimeout = Math.min(lastTimeout * 2, WORKFLOW_REACTIVATE_MAX_TIMEOUT);
 }
 
-Logger.info(
+this.logger.info(
 ` -> Activation of workflow "${workflowName}" (${workflowId}) did fail with error: "${
 error.message as string
 }" | retry in ${Math.floor(lastTimeout / 1000)} seconds`,
@@ -855,10 +856,13 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 this.queuedWorkflowActivations[workflowId].timeout = setTimeout(retryFunction, lastTimeout);
 return;
 }
-Logger.info(` -> Activation of workflow "${workflowName}" (${workflowId}) was successful!`, {
-workflowId,
-workflowName,
-});
+this.logger.info(
+` -> Activation of workflow "${workflowName}" (${workflowId}) was successful!`,
+{
+workflowId,
+workflowName,
+},
+);
 };
 
 // Just to be sure that there is not chance that for any reason
@@ -904,7 +908,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 await this.removeWorkflowWebhooks(workflowId);
 } catch (error) {
 ErrorReporter.error(error);
-Logger.error(
+this.logger.error(
 `Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`,
 );
 }
@@ -923,7 +927,7 @@ export class ActiveWorkflowRunner implements IWebhookManager {
 if (this.activeWorkflows.isActive(workflowId)) {
 const removalSuccess = await this.activeWorkflows.remove(workflowId);
 if (removalSuccess) {
-Logger.verbose(`Successfully deactivated workflow "${workflowId}"`, { workflowId });
+this.logger.verbose(`Successfully deactivated workflow "${workflowId}"`, { workflowId });
 }
 }
 }
@@ -3,8 +3,9 @@ import { mkdir, utimes, open, rm } from 'fs/promises';
 import { join, dirname } from 'path';
 import { Container } from 'typedi';
 import { InstanceSettings } from 'n8n-core';
-import { LoggerProxy, sleep } from 'n8n-workflow';
+import { sleep } from 'n8n-workflow';
 import { inProduction } from '@/constants';
+import { Logger } from '@/Logger';
 
 export const touchFile = async (filePath: string): Promise<void> => {
 await mkdir(dirname(filePath), { recursive: true });
@@ -25,7 +26,7 @@ export const init = async () => {
 
 if (existsSync(journalFile)) {
 // Crash detected
-LoggerProxy.error('Last session crashed');
+Container.get(Logger).error('Last session crashed');
 // add a 10 seconds pause to slow down crash-looping
 await sleep(10_000);
 }
@@ -3,6 +3,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 /* eslint-disable @typescript-eslint/no-unsafe-call */
+import { Service } from 'typedi';
 import { Credentials, NodeExecuteFunctions } from 'n8n-core';
 import get from 'lodash/get';
 
@@ -38,7 +39,6 @@ import {
 NodeHelpers,
 RoutingNode,
 Workflow,
-LoggerProxy as Logger,
 ErrorReporterProxy as ErrorReporter,
 } from 'n8n-workflow';
 
@@ -52,8 +52,8 @@ import { CredentialTypes } from '@/CredentialTypes';
 import { CredentialsOverwrites } from '@/CredentialsOverwrites';
 import { whereClause } from './UserManagement/UserManagementHelper';
 import { RESPONSE_ERROR_MESSAGES } from './constants';
-import { Service } from 'typedi';
 import { isObjectLiteral } from './utils';
+import { Logger } from '@/Logger';
 
 const { OAUTH2_CREDENTIAL_TEST_SUCCEEDED, OAUTH2_CREDENTIAL_TEST_FAILED } = RESPONSE_ERROR_MESSAGES;
 
@@ -89,6 +89,7 @@ const mockNodeTypes: INodeTypes = {
 @Service()
 export class CredentialsHelper extends ICredentialsHelper {
 constructor(
+private readonly logger: Logger,
 private readonly credentialTypes: CredentialTypes,
 private readonly nodeTypes: NodeTypes,
 private readonly credentialsOverwrites: CredentialsOverwrites,
@@ -601,7 +602,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 user.isOwner,
 );
 } catch (error) {
-Logger.debug('Credential test failed', error);
+this.logger.debug('Credential test failed', error);
 return {
 status: 'Error',
 message: error.message.toString(),
@@ -757,7 +758,7 @@
 message: error.cause.code,
 };
 }
-Logger.debug('Credential test failed', error);
+this.logger.debug('Credential test failed', error);
 return {
 status: 'Error',
 message: error.message.toString(),
@@ -1,9 +1,10 @@
 import { Service } from 'typedi';
 import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
-import { deepCopy, LoggerProxy as Logger, jsonParse } from 'n8n-workflow';
+import { deepCopy, jsonParse } from 'n8n-workflow';
 import config from '@/config';
 import type { ICredentialsOverwrite } from '@/Interfaces';
 import { CredentialTypes } from '@/CredentialTypes';
+import { Logger } from '@/Logger';
 
 @Service()
 export class CredentialsOverwrites {
@@ -11,7 +12,10 @@ export class CredentialsOverwrites {
 
 private resolvedTypes: string[] = [];
 
-constructor(private credentialTypes: CredentialTypes) {
+constructor(
+private readonly credentialTypes: CredentialTypes,
+private readonly logger: Logger,
+) {
 const data = config.getEnv('credentials.overwrite.data');
 const overwriteData = jsonParse<ICredentialsOverwrite>(data, {
 errorMessage: 'The credentials-overwrite is not valid JSON.',
@@ -61,7 +65,7 @@
 }
 
 if (!this.credentialTypes.recognizes(type)) {
-Logger.warn(`Unknown credential type ${type} in Credential overwrites`);
+this.logger.warn(`Unknown credential type ${type} in Credential overwrites`);
 return;
 }
 
@@ -8,7 +8,7 @@ import type {
 import { Cipher } from 'n8n-core';
 import Container, { Service } from 'typedi';
 
-import { getLogger } from '@/Logger';
+import { Logger } from '@/Logger';
 
 import { jsonParse, type IDataObject } from 'n8n-workflow';
 import {
@@ -21,8 +21,6 @@ import { InternalHooks } from '@/InternalHooks';
 import { ExternalSecretsProviders } from './ExternalSecretsProviders.ee';
 import { OrchestrationMainService } from '@/services/orchestration/main/orchestration.main.service';
 
-const logger = getLogger();
-
 @Service()
 export class ExternalSecretsManager {
 private providers: Record<string, SecretsProvider> = {};
@@ -38,10 +36,11 @@ export class ExternalSecretsManager {
 initRetryTimeouts: Record<string, NodeJS.Timer> = {};
 
 constructor(
-private settingsRepo: SettingsRepository,
-private license: License,
-private secretsProviders: ExternalSecretsProviders,
-private cipher: Cipher,
+private readonly logger: Logger,
+private readonly settingsRepo: SettingsRepository,
+private readonly license: License,
+private readonly secretsProviders: ExternalSecretsProviders,
+private readonly cipher: Cipher,
 ) {}
 
 async init(): Promise<void> {
@@ -72,7 +71,7 @@
 }
 
 async reloadAllProviders(backoff?: number) {
-logger.debug('Reloading all external secrets providers');
+this.logger.debug('Reloading all external secrets providers');
 const providers = this.getProviderNames();
 if (!providers) {
 return;
@@ -140,7 +139,7 @@
 try {
 await provider.init(providerSettings);
 } catch (e) {
-logger.error(
+this.logger.error(
 `Error initializing secrets provider ${provider.displayName} (${provider.name}).`,
 );
 this.retryInitWithBackoff(name, currentBackoff);
@@ -155,7 +154,7 @@
 try {
 await provider.disconnect();
 } catch {}
-logger.error(
+this.logger.error(
 `Error initializing secrets provider ${provider.displayName} (${provider.name}).`,
 );
 this.retryInitWithBackoff(name, currentBackoff);
@@ -190,7 +189,7 @@
 await p.update();
 }
 } catch {
-logger.error(`Error updating secrets provider ${p.displayName} (${p.name}).`);
+this.logger.error(`Error updating secrets provider ${p.displayName} (${p.name}).`);
 }
 }),
 );
@@ -3,10 +3,9 @@ import { SecretsProvider } from '@/Interfaces';
 import type { IDataObject, INodeProperties } from 'n8n-workflow';
 import type { AxiosInstance, AxiosResponse } from 'axios';
 import axios from 'axios';
-import { getLogger } from '@/Logger';
+import { Logger } from '@/Logger';
 import { EXTERNAL_SECRETS_NAME_REGEX } from '../constants';
-
-const logger = getLogger();
+import { Container } from 'typedi';
 
 type VaultAuthMethod = 'token' | 'usernameAndPassword' | 'appRole';
 
@@ -239,6 +238,10 @@ export class VaultProvider extends SecretsProvider {
 
 private refreshAbort = new AbortController();
 
+constructor(readonly logger = Container.get(Logger)) {
+super();
+}
+
 async init(settings: SecretsProviderSettings): Promise<void> {
 this.settings = settings.settings as unknown as VaultSettings;
 
@@ -274,7 +277,7 @@ export class VaultProvider extends SecretsProvider {
 );
 } catch {
 this.state = 'error';
-logger.error('Failed to connect to Vault using Username and Password credentials.');
+this.logger.error('Failed to connect to Vault using Username and Password credentials.');
 return;
 }
 } else if (this.settings.authMethod === 'appRole') {
@@ -282,7 +285,7 @@
 this.#currentToken = await this.authAppRole(this.settings.roleId, this.settings.secretId);
 } catch {
 this.state = 'error';
-logger.error('Failed to connect to Vault using AppRole credentials.');
+this.logger.error('Failed to connect to Vault using AppRole credentials.');
 return;
 }
 }
@@ -297,13 +300,13 @@
 }
 } catch (e) {
 this.state = 'error';
-logger.error('Failed credentials test on Vault connect.');
+this.logger.error('Failed credentials test on Vault connect.');
 }
 
 try {
 await this.update();
 } catch {
-logger.warn('Failed to update Vault secrets');
+this.logger.warn('Failed to update Vault secrets');
 }
 }
 
@@ -343,7 +346,9 @@
 [this.#tokenInfo] = await this.getTokenInfo();
 
 if (!this.#tokenInfo) {
-logger.error('Failed to fetch token info during renewal. Cancelling all future renewals.');
+this.logger.error(
+'Failed to fetch token info during renewal. Cancelling all future renewals.',
+);
 return;
 }
 
@@ -353,7 +358,7 @@
 
 this.setupTokenRefresh();
 } catch {
-logger.error('Failed to renew Vault token. Attempting to reconnect.');
+this.logger.error('Failed to renew Vault token. Attempting to reconnect.');
 void this.connect();
 }
 };
@@ -1,5 +1,4 @@
 import type { Entry as LdapUser } from 'ldapts';
-import { LoggerProxy as Logger } from 'n8n-workflow';
 import { QueryFailedError } from 'typeorm/error/QueryFailedError';
 import type { LdapService } from './LdapService.ee';
 import type { LdapConfig } from './types';
@@ -17,6 +16,7 @@ import type { Role } from '@db/entities/Role';
 import type { RunningMode, SyncStatus } from '@db/entities/AuthProviderSyncHistory';
 import { Container } from 'typedi';
 import { InternalHooks } from '@/InternalHooks';
+import { Logger } from '@/Logger';
 
 export class LdapSync {
 private intervalId: NodeJS.Timeout | undefined = undefined;
@@ -25,6 +25,12 @@ export class LdapSync {
 
 private _ldapService: LdapService;
 
+private readonly logger: Logger;
+
+constructor() {
+this.logger = Container.get(Logger);
+}
+
 /**
 * Updates the LDAP configuration
 */
@@ -71,7 +77,7 @@
 * else the users are not modified
 */
 async run(mode: RunningMode): Promise<void> {
-Logger.debug(`LDAP - Starting a synchronization run in ${mode} mode`);
+this.logger.debug(`LDAP - Starting a synchronization run in ${mode} mode`);
 
 let adUsers: LdapUser[] = [];
 
@@ -80,14 +86,14 @@
 createFilter(`(${this._config.loginIdAttribute}=*)`, this._config.userFilter),
 );
 
-Logger.debug('LDAP - Users return by the query', {
+this.logger.debug('LDAP - Users return by the query', {
 users: adUsers,
 });
 
 resolveBinaryAttributes(adUsers);
 } catch (e) {
 if (e instanceof Error) {
-Logger.error(`LDAP - ${e.message}`);
+this.logger.error(`LDAP - ${e.message}`);
 throw e;
 }
 }
@@ -104,7 +110,7 @@
 role,
 );
 
-Logger.debug('LDAP - Users processed', {
+this.logger.debug('LDAP - Users processed', {
 created: usersToCreate.length,
 updated: usersToUpdate.length,
 disabled: usersToDisable.length,
@@ -144,7 +150,7 @@
 error: errorMessage,
 });
 
-Logger.debug('LDAP - Synchronization finished successfully');
+this.logger.debug('LDAP - Synchronization finished successfully');
 }
 
 /**
@@ -20,7 +20,7 @@ import {
 LDAP_LOGIN_LABEL,
 } from './constants';
 import type { ConnectionSecurity, LdapConfig } from './types';
-import { jsonParse, LoggerProxy as Logger } from 'n8n-workflow';
+import { jsonParse } from 'n8n-workflow';
 import { License } from '@/License';
 import { InternalHooks } from '@/InternalHooks';
 import {
@@ -31,6 +31,7 @@ import {
 } from '@/sso/ssoHelpers';
 import { InternalServerError } from '../ResponseHelper';
 import { RoleService } from '@/services/role.service';
+import { Logger } from '@/Logger';
 
 /**
 * Check whether the LDAP feature is disabled in the instance
@@ -185,7 +186,7 @@ export const handleLdapInit = async (): Promise<void> => {
 try {
 await setGlobalLdapConfigVariables(ldapConfig);
 } catch (error) {
-Logger.warn(
+Container.get(Logger).warn(
 `Cannot set LDAP login enabled state when an authentication method other than email or ldap is active (current: ${getCurrentAuthenticationMethod()})`,
 // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
 error,
@@ -235,7 +236,7 @@ export const findAndAuthenticateLdapUser = async (
 void Container.get(InternalHooks).onLdapLoginSyncFailed({
 error: e.message,
 });
-Logger.error('LDAP - Error during search', { message: e.message });
+Container.get(Logger).error('LDAP - Error during search', { message: e.message });
 }
 return undefined;
 }
@@ -261,7 +262,9 @@
 await ldapService.validUser(user.dn, password);
 } catch (e) {
 if (e instanceof Error) {
-Logger.error('LDAP - Error validating user against LDAP server', { message: e.message });
+Container.get(Logger).error('LDAP - Error validating user against LDAP server', {
+message: e.message,
+});
 }
 return undefined;
 }
@@ -1,7 +1,8 @@
 import type { TEntitlement, TFeatures, TLicenseBlock } from '@n8n_io/license-sdk';
 import { LicenseManager } from '@n8n_io/license-sdk';
-import type { ILogger } from 'n8n-workflow';
-import { getLogger } from './Logger';
+import { InstanceSettings, ObjectStoreService } from 'n8n-core';
+import Container, { Service } from 'typedi';
+import { Logger } from '@/Logger';
 import config from '@/config';
 import * as Db from '@/Db';
 import {
@@ -11,12 +12,10 @@ import {
 SETTINGS_LICENSE_CERT_KEY,
 UNLIMITED_LICENSE_QUOTA,
 } from './constants';
-import Container, { Service } from 'typedi';
 import { WorkflowRepository } from '@/databases/repositories';
 import type { BooleanLicenseFeature, N8nInstanceType, NumericLicenseFeature } from './Interfaces';
 import type { RedisServicePubSubPublisher } from './services/redis/RedisServicePubSubPublisher';
 import { RedisService } from './services/redis.service';
-import { InstanceSettings, ObjectStoreService } from 'n8n-core';
 
 type FeatureReturnType = Partial<
 {
@@ -26,15 +25,14 @@ type FeatureReturnType = Partial<
 
 @Service()
 export class License {
-private logger: ILogger;
-
 private manager: LicenseManager | undefined;
 
 private redisPublisher: RedisServicePubSubPublisher;
 
-constructor(private readonly instanceSettings: InstanceSettings) {
-this.logger = getLogger();
-}
+constructor(
+private readonly logger: Logger,
+private readonly instanceSettings: InstanceSettings,
+) {}
 
 async init(instanceType: N8nInstanceType = 'main') {
 if (this.manager) {
|
@ -17,7 +17,7 @@ import type {
|
|||
INodeTypeData,
|
||||
ICredentialTypeData,
|
||||
} from 'n8n-workflow';
|
||||
import { LoggerProxy, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
|
||||
import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
|
||||
|
||||
import config from '@/config';
|
||||
import {
|
||||
|
@ -27,6 +27,7 @@ import {
|
|||
CLI_DIR,
|
||||
inE2ETests,
|
||||
} from '@/constants';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
interface LoadedNodesAndCredentials {
|
||||
nodes: INodeTypeData;
|
||||
|
@ -49,7 +50,10 @@ export class LoadNodesAndCredentials {
|
|||
|
||||
private postProcessors: Array<() => Promise<void>> = [];
|
||||
|
||||
constructor(private readonly instanceSettings: InstanceSettings) {}
|
||||
constructor(
|
||||
private readonly logger: Logger,
|
||||
private readonly instanceSettings: InstanceSettings,
|
||||
) {}
|
||||
|
||||
async init() {
|
||||
if (inTest) throw new Error('Not available in tests');
|
||||
|
@ -197,7 +201,7 @@ export class LoadNodesAndCredentials {
|
|||
return description.credentials.some(({ name }) => {
|
||||
const credType = this.types.credentials.find((t) => t.name === name);
|
||||
if (!credType) {
|
||||
LoggerProxy.warn(
|
||||
this.logger.warn(
|
||||
`Failed to load Custom API options for the node "${description.name}": Unknown credential name "${name}"`,
|
||||
);
|
||||
return false;
|
||||
|
|
|
@@ -1,18 +1,17 @@
 /* eslint-disable @typescript-eslint/no-shadow */
+import { inspect } from 'util';
+import { Service } from 'typedi';
 import winston from 'winston';
-
-import type { IDataObject, ILogger, LogTypes } from 'n8n-workflow';
-
 import callsites from 'callsites';
-import { inspect } from 'util';
 import { basename } from 'path';
+import { LoggerProxy, type IDataObject, LOG_LEVELS } from 'n8n-workflow';
 
 import config from '@/config';
 
 const noOp = () => {};
-const levelNames = ['debug', 'verbose', 'info', 'warn', 'error'] as const;
 
-export class Logger implements ILogger {
+@Service()
+export class Logger {
 private logger: winston.Logger;
 
 constructor() {
@@ -24,7 +23,7 @@ export class Logger implements ILogger {
 });
 
 // Change all methods with higher log-level to no-op
-for (const levelName of levelNames) {
+for (const levelName of LOG_LEVELS) {
 if (this.logger.levels[levelName] > this.logger.levels[level]) {
 Object.defineProperty(this, levelName, { value: noOp });
 }
@@ -33,7 +32,7 @@
 const output = config
 .getEnv('logs.output')
 .split(',')
-.map((output) => output.trim());
+.map((line) => line.trim());
 
 if (output.includes('console')) {
 let format: winston.Logform.Format;
@@ -43,8 +42,8 @@
 winston.format.timestamp(),
 winston.format.colorize({ all: true }),
 
-winston.format.printf(({ level, message, timestamp, metadata }) => {
-return `${timestamp} | ${level.padEnd(18)} | ${message}${
+winston.format.printf(({ level: logLevel, message, timestamp, metadata }) => {
+return `${timestamp} | ${logLevel.padEnd(18)} | ${message}${
 // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
 Object.keys(metadata).length ? ` ${JSON.stringify(inspect(metadata))}` : ''
 }`;
@@ -76,13 +75,15 @@ export class Logger implements ILogger {
 }),
 );
 }
 
+LoggerProxy.init(this);
 }
 
-log(type: LogTypes, message: string, meta: object = {}): void {
+private log(level: (typeof LOG_LEVELS)[number], message: string, meta: object = {}): void {
 const callsite = callsites();
 // We are using the third array element as the structure is as follows:
 // [0]: this file
-// [1]: Should be LoggerProxy
+// [1]: Should be Logger
 // [2]: Should point to the caller.
 // Note: getting line number is useless because at this point
 // We are in runtime, so it means we are looking at compiled js files
@@ -95,38 +96,28 @@
 logDetails.function = functionName;
 }
 }
-this.logger.log(type, message, { ...meta, ...logDetails });
+this.logger.log(level, message, { ...meta, ...logDetails });
 }
 
+// Convenience methods below
+
-debug(message: string, meta: object = {}): void {
-this.log('debug', message, meta);
+error(message: string, meta: object = {}): void {
+this.log('error', message, meta);
 }
 
+warn(message: string, meta: object = {}): void {
+this.log('warn', message, meta);
+}
+
 info(message: string, meta: object = {}): void {
 this.log('info', message, meta);
 }
 
-error(message: string, meta: object = {}): void {
-this.log('error', message, meta);
+debug(message: string, meta: object = {}): void {
+this.log('debug', message, meta);
 }
 
 verbose(message: string, meta: object = {}): void {
 this.log('verbose', message, meta);
 }
-
-warn(message: string, meta: object = {}): void {
-this.log('warn', message, meta);
-}
 }
-
-let activeLoggerInstance: Logger | undefined;
-
-export function getLogger() {
-if (activeLoggerInstance === undefined) {
-activeLoggerInstance = new Logger();
-}
-
-return activeLoggerInstance;
-}
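One detail worth noting in the Logger.ts hunks above: the new constructor still calls `LoggerProxy.init(this)`, so code elsewhere in the codebase that keeps logging through the static proxy continues to work once the service has been constructed; only the call sites touched in this diff are migrated to the injected service. A rough sketch of why both paths reach the same instance (assuming typedi's default singleton scope):

import { Container } from 'typedi';
import { LoggerProxy } from 'n8n-workflow';
import { Logger } from '@/Logger';

// Constructing the singleton also runs LoggerProxy.init(this) in its constructor.
const logger = Container.get(Logger);

logger.debug('direct call through the injected service');
LoggerProxy.debug('routed to the same Logger instance via the proxy');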
@@ -39,11 +39,6 @@ export = {
 
 const savedCredential = await saveCredential(newCredential, req.user, encryptedData);
 
-// LoggerProxy.verbose('New credential created', {
-// credentialsId: newCredential.id,
-// ownerId: req.user.id,
-// });
-
 return res.json(sanitizeCredentials(savedCredential));
 } catch ({ message, httpStatusCode }) {
 // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
@@ -46,7 +46,7 @@ import type {
 ResourceMapperFields,
 IN8nUISettings,
 } from 'n8n-workflow';
-import { LoggerProxy, jsonParse } from 'n8n-workflow';
+import { jsonParse } from 'n8n-workflow';
 
 // @ts-ignore
 import timezones from 'google-timezones-json';
@@ -204,7 +204,7 @@ export class Server extends AbstractServer {
 this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');
 
 await super.start();
-LoggerProxy.debug(`Server ID: ${this.uniqueInstanceId}`);
+this.logger.debug(`Server ID: ${this.uniqueInstanceId}`);
 
 const cpus = os.cpus();
 const binaryDataConfig = config.getEnv('binaryDataManager');
@@ -270,11 +270,10 @@
 }
 
 private async registerControllers(ignoredEndpoints: Readonly<string[]>) {
-const { app, externalHooks, activeWorkflowRunner, nodeTypes } = this;
+const { app, externalHooks, activeWorkflowRunner, nodeTypes, logger } = this;
 const repositories = Db.collections;
 setupAuthMiddlewares(app, ignoredEndpoints, this.restEndpoint);
 
-const logger = LoggerProxy;
 const internalHooks = Container.get(InternalHooks);
 const mailer = Container.get(UserManagementMailer);
 const userService = Container.get(UserService);
@@ -285,7 +284,7 @@
 const controllers: object[] = [
 new EventBusController(),
 new EventBusControllerEE(),
-new AuthController(config, logger, internalHooks, mfaService, userService, postHog),
+Container.get(AuthController),
 new OwnerController(
 config,
 logger,
@@ -294,7 +293,7 @@
 userService,
 postHog,
 ),
-new MeController(logger, externalHooks, internalHooks, userService),
+Container.get(MeController),
 new NodeTypesController(config, nodeTypes),
 new PasswordResetController(
 logger,
@@ -457,7 +456,7 @@
 try {
 await Container.get(SamlService).init();
 } catch (error) {
-LoggerProxy.warn(`SAML initialization failed: ${error.message}`);
+this.logger.warn(`SAML initialization failed: ${error.message}`);
 }
 
 // ----------------------------------------
@@ -472,7 +471,7 @@
 try {
 await Container.get(SourceControlService).init();
 } catch (error) {
-LoggerProxy.warn(`Source Control initialization failed: ${error.message}`);
+this.logger.warn(`Source Control initialization failed: ${error.message}`);
 }
 
 // ----------------------------------------
@@ -667,7 +666,7 @@
 });
 
 if (!shared) {
-LoggerProxy.verbose('User attempted to access workflow errors without permissions', {
+this.logger.verbose('User attempted to access workflow errors without permissions', {
 workflowId,
 userId: req.user.id,
 });
@@ -714,14 +713,14 @@
 const { id: credentialId } = req.query;
 
 if (!credentialId) {
-LoggerProxy.error('OAuth1 credential authorization failed due to missing credential ID');
+this.logger.error('OAuth1 credential authorization failed due to missing credential ID');
 throw new ResponseHelper.BadRequestError('Required credential ID is missing');
 }
 
 const credential = await getCredentialForUser(credentialId, req.user);
 
 if (!credential) {
-LoggerProxy.error(
+this.logger.error(
 'OAuth1 credential authorization failed because the current user does not have the correct permissions',
 { userId: req.user.id },
 );
@@ -826,7 +825,7 @@
 // Update the credentials in DB
 await Db.collections.Credentials.update(credentialId, newCredentialsData);
 
-LoggerProxy.verbose('OAuth1 authorization successful for new credential', {
+this.logger.verbose('OAuth1 authorization successful for new credential', {
 userId: req.user.id,
 credentialId,
 });
@@ -848,7 +847,7 @@
 req.query,
 )}`,
 );
-LoggerProxy.error(
+this.logger.error(
 'OAuth1 callback failed because of insufficient parameters received',
 {
 userId: req.user?.id,
@@ -861,7 +860,7 @@
 const credential = await getCredentialWithoutUser(credentialId);
 
 if (!credential) {
-LoggerProxy.error('OAuth1 callback failed because of insufficient user permissions', {
+this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
 userId: req.user?.id,
 credentialId,
 });
@@ -906,7 +905,7 @@
 try {
 oauthToken = await axios.request(options);
 } catch (error) {
-LoggerProxy.error('Unable to fetch tokens for OAuth1 callback', {
+this.logger.error('Unable to fetch tokens for OAuth1 callback', {
 userId: req.user?.id,
 credentialId,
 });
@@ -934,13 +933,13 @@
 // Save the credentials in DB
 await Db.collections.Credentials.update(credentialId, newCredentialsData);
 
-LoggerProxy.verbose('OAuth1 callback successful for new credential', {
+this.logger.verbose('OAuth1 callback successful for new credential', {
 userId: req.user?.id,
 credentialId,
 });
 res.sendFile(pathResolve(TEMPLATES_DIR, 'oauth-callback.html'));
 } catch (error) {
-LoggerProxy.error('OAuth1 callback failed because of insufficient user permissions', {
+this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
 userId: req.user?.id,
 credentialId: req.query.cid,
 });
@@ -1,14 +1,19 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import type { Transporter } from 'nodemailer';
 import { createTransport } from 'nodemailer';
-import { ErrorReporterProxy as ErrorReporter, LoggerProxy as Logger } from 'n8n-workflow';
+import type SMTPConnection from 'nodemailer/lib/smtp-connection';
+import { Service } from 'typedi';
+import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
 import config from '@/config';
 import type { MailData, SendEmailResult } from './Interfaces';
-import type SMTPConnection from 'nodemailer/lib/smtp-connection';
+import { Logger } from '@/Logger';
 
+@Service()
 export class NodeMailer {
 private transport?: Transporter;
 
+constructor(private readonly logger: Logger) {}
+
 async init(): Promise<void> {
 const transportConfig: SMTPConnection.Options = {
 host: config.getEnv('userManagement.emails.smtp.host'),
@@ -81,12 +86,12 @@ export class NodeMailer {
 text: mailData.textOnly,
 html: mailData.body,
 });
-Logger.verbose(
+this.logger.verbose(
 `Email sent successfully to the following recipients: ${mailData.emailRecipients.toString()}`,
 );
 } catch (error) {
 ErrorReporter.error(error);
-Logger.error('Failed to send email', { recipients: mailData.emailRecipients, error });
+this.logger.error('Failed to send email', { recipients: mailData.emailRecipients, error });
 throw error;
 }
 
@@ -2,7 +2,7 @@ import { existsSync } from 'fs';
 import { readFile } from 'fs/promises';
 import Handlebars from 'handlebars';
 import { join as pathJoin } from 'path';
-import { Service } from 'typedi';
+import { Container, Service } from 'typedi';
 import config from '@/config';
 import type { InviteEmailData, PasswordResetData, SendEmailResult } from './Interfaces';
 import { NodeMailer } from './NodeMailer';
@@ -45,7 +45,7 @@ export class UserManagementMailer {
 
 // Other implementations can be used in the future.
 if (this.isEmailSetUp) {
-this.mailer = new NodeMailer();
+this.mailer = Container.get(NodeMailer);
 }
 }
 
@@ -1,8 +1,4 @@
-import {
-ErrorReporterProxy as ErrorReporter,
-LoggerProxy as Logger,
-WorkflowOperationError,
-} from 'n8n-workflow';
+import { ErrorReporterProxy as ErrorReporter, WorkflowOperationError } from 'n8n-workflow';
 import { Container, Service } from 'typedi';
 import type { FindManyOptions, ObjectLiteral } from 'typeorm';
 import { Not, LessThanOrEqual } from 'typeorm';
@@ -20,6 +16,7 @@ import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEven
 import { ExecutionRepository } from '@db/repositories';
 import type { ExecutionEntity } from '@db/entities/ExecutionEntity';
 import { OwnershipService } from './services/ownership.service';
+import { Logger } from '@/Logger';
 
 @Service()
 export class WaitTracker {
@@ -33,8 +30,9 @@ export class WaitTracker {
 mainTimer: NodeJS.Timeout;
 
 constructor(
-private executionRepository: ExecutionRepository,
-private ownershipService: OwnershipService,
+private readonly logger: Logger,
+private readonly executionRepository: ExecutionRepository,
+private readonly ownershipService: OwnershipService,
 ) {
 // Poll every 60 seconds a list of upcoming executions
 this.mainTimer = setInterval(() => {
@@ -45,7 +43,7 @@
 }
 
 async getWaitingExecutions() {
-Logger.debug('Wait tracker querying database for waiting executions');
+this.logger.debug('Wait tracker querying database for waiting executions');
 // Find all the executions which should be triggered in the next 70 seconds
 const findQuery: FindManyOptions<ExecutionEntity> = {
 select: ['id', 'waitTill'],
@@ -74,7 +72,7 @@
 }
 
 const executionIds = executions.map((execution) => execution.id).join(', ');
-Logger.debug(
+this.logger.debug(
 `Wait tracker found ${executions.length} executions. Setting timer for IDs: ${executionIds}`,
 );
 
@@ -163,7 +161,7 @@
 }
 
 startExecution(executionId: string) {
-Logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId });
+this.logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId });
 delete this.waitingExecutions[executionId];
 
 (async () => {
@@ -198,7 +196,7 @@
 await workflowRunner.run(data, false, false, executionId);
 })().catch((error: Error) => {
 ErrorReporter.error(error);
-Logger.error(
+this.logger.error(
 `There was a problem starting the waiting execution with id "${executionId}": "${error.message}"`,
 { executionId },
 );
@@ -1,4 +1,4 @@
-import { NodeHelpers, Workflow, LoggerProxy as Logger } from 'n8n-workflow';
+import { NodeHelpers, Workflow } from 'n8n-workflow';
 import { Service } from 'typedi';
 import type express from 'express';
 
@@ -14,13 +14,15 @@ import type {
 import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
 import { ExecutionRepository } from '@db/repositories';
 import { OwnershipService } from './services/ownership.service';
+import { Logger } from '@/Logger';
 
 @Service()
 export class WaitingWebhooks implements IWebhookManager {
 constructor(
-private nodeTypes: NodeTypes,
-private executionRepository: ExecutionRepository,
-private ownershipService: OwnershipService,
+private readonly logger: Logger,
+private readonly nodeTypes: NodeTypes,
+private readonly executionRepository: ExecutionRepository,
+private readonly ownershipService: OwnershipService,
 ) {}
 
 // TODO: implement `getWebhookMethods` for CORS support
@@ -30,7 +32,7 @@
 res: express.Response,
 ): Promise<IResponseCallbackData> {
 const { path: executionId, suffix } = req.params;
-Logger.debug(`Received waiting-webhook "${req.method}" for execution "${executionId}"`);
+this.logger.debug(`Received waiting-webhook "${req.method}" for execution "${executionId}"`);
 
 // Reset request parameters
 req.params = {} as WaitingWebhookRequest['params'];
@@ -39,7 +39,6 @@ import {
 createDeferredPromise,
 ErrorReporterProxy as ErrorReporter,
 FORM_TRIGGER_PATH_IDENTIFIER,
-LoggerProxy as Logger,
 NodeHelpers,
 } from 'n8n-workflow';
 
@@ -64,6 +63,7 @@ import { EventsService } from '@/services/events.service';
 import { OwnershipService } from './services/ownership.service';
 import { parseBody } from './middlewares';
 import { WorkflowsService } from './workflows/workflows.services';
+import { Logger } from './Logger';
 
 const pipeline = promisify(stream.pipeline);
 
@@ -534,7 +534,7 @@ export async function executeWebhook(
 })
 .catch(async (error) => {
 ErrorReporter.error(error);
-Logger.error(
+Container.get(Logger).error(
 `Error with Webhook-Response for execution "${executionId}": "${error.message}"`,
 { executionId, workflowId: workflow.id },
 );
@@ -551,7 +551,7 @@
 responsePromise,
 );
 
-Logger.verbose(
+Container.get(Logger).verbose(
 `Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`,
 { executionId },
 );
@ -27,7 +27,6 @@ import type {
|
|||
} from 'n8n-workflow';
|
||||
import {
|
||||
ErrorReporterProxy as ErrorReporter,
|
||||
LoggerProxy as Logger,
|
||||
NodeOperationError,
|
||||
Workflow,
|
||||
WorkflowHooks,
|
||||
|
@ -64,6 +63,7 @@ import {
|
|||
updateExistingExecution,
|
||||
} from './executionLifecycleHooks/shared/sharedHookFunctions';
|
||||
import { restoreBinaryDataId } from './executionLifecycleHooks/restoreBinaryDataId';
|
||||
import { Logger } from './Logger';
|
||||
|
||||
const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
|
||||
|
||||
|
@ -118,8 +118,9 @@ export function executeErrorWorkflow(
|
|||
executionId?: string,
|
||||
retryOf?: string,
|
||||
): void {
|
||||
// Check if there was an error and if so if an errorWorkflow or a trigger is set
|
||||
const logger = Container.get(Logger);
|
||||
|
||||
// Check if there was an error and if so if an errorWorkflow or a trigger is set
|
||||
let pastExecutionUrl: string | undefined;
|
||||
if (executionId !== undefined) {
|
||||
pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}workflow/${
|
||||
|
@ -165,7 +166,7 @@ export function executeErrorWorkflow(
|
|||
// To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow.
|
||||
const { errorWorkflow } = workflowData.settings ?? {};
|
||||
if (errorWorkflow && !(mode === 'error' && workflowId && errorWorkflow === workflowId)) {
|
||||
Logger.verbose('Start external error workflow', {
|
||||
logger.verbose('Start external error workflow', {
|
||||
executionId,
|
||||
errorWorkflowId: errorWorkflow,
|
||||
workflowId,
|
||||
|
@ -187,7 +188,7 @@ export function executeErrorWorkflow(
|
|||
})
|
||||
.catch((error: Error) => {
|
||||
ErrorReporter.error(error);
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`Could not execute ErrorWorkflow for execution ID ${this.executionId} because of error querying the workflow owner`,
|
||||
{
|
||||
executionId,
|
||||
|
@ -203,7 +204,7 @@ export function executeErrorWorkflow(
|
|||
workflowId !== undefined &&
|
||||
workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)
|
||||
) {
|
||||
Logger.verbose('Start internal error workflow', { executionId, workflowId });
|
||||
logger.verbose('Start internal error workflow', { executionId, workflowId });
|
||||
void Container.get(OwnershipService)
|
||||
.getWorkflowOwnerCached(workflowId)
|
||||
.then((user) => {
|
||||
|
@ -218,6 +219,7 @@ export function executeErrorWorkflow(
|
|||
*
|
||||
*/
|
||||
function hookFunctionsPush(): IWorkflowExecuteHooks {
|
||||
const logger = Container.get(Logger);
|
||||
const pushInstance = Container.get(Push);
|
||||
return {
|
||||
nodeExecuteBefore: [
|
||||
|
@ -229,7 +231,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
return;
|
||||
}
|
||||
|
||||
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
|
||||
logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
|
||||
executionId,
|
||||
sessionId,
|
||||
workflowId: this.workflowData.id,
|
||||
|
@ -246,7 +248,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
return;
|
||||
}
|
||||
|
||||
Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
|
||||
logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, {
|
||||
executionId,
|
||||
sessionId,
|
||||
workflowId: this.workflowData.id,
|
||||
|
@ -259,7 +261,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
async function (this: WorkflowHooks): Promise<void> {
|
||||
const { sessionId, executionId } = this;
|
||||
const { id: workflowId, name: workflowName } = this.workflowData;
|
||||
Logger.debug('Executing hook (hookFunctionsPush)', {
|
||||
logger.debug('Executing hook (hookFunctionsPush)', {
|
||||
executionId,
|
||||
sessionId,
|
||||
workflowId,
|
||||
|
@ -291,7 +293,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
): Promise<void> {
|
||||
const { sessionId, executionId, retryOf } = this;
|
||||
const { id: workflowId } = this.workflowData;
|
||||
Logger.debug('Executing hook (hookFunctionsPush)', {
|
||||
logger.debug('Executing hook (hookFunctionsPush)', {
|
||||
executionId,
|
||||
sessionId,
|
||||
workflowId,
|
||||
|
@ -322,7 +324,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
}
|
||||
|
||||
// Push data to editor-ui once workflow finished
|
||||
Logger.debug(`Save execution progress to database for execution ID ${executionId} `, {
|
||||
logger.debug(`Save execution progress to database for execution ID ${executionId} `, {
|
||||
executionId,
|
||||
workflowId,
|
||||
});
|
||||
|
@ -340,6 +342,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
|
|||
}
|
||||
|
||||
export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowExecuteHooks {
|
||||
const logger = Container.get(Logger);
|
||||
const externalHooks = Container.get(ExternalHooks);
|
||||
return {
|
||||
workflowExecuteBefore: [
|
||||
|
@ -368,7 +371,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
|||
}
|
||||
|
||||
try {
|
||||
Logger.debug(
|
||||
logger.debug(
|
||||
`Save execution progress to database for execution ID ${this.executionId} `,
|
||||
{ executionId: this.executionId, nodeName },
|
||||
);
|
||||
|
@ -436,7 +439,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
|||
// For busy machines, we may get "Database is locked" errors.
|
||||
|
||||
// We do this to prevent crashes and executions ending in `unknown` state.
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`Failed saving execution progress to database for execution ID ${this.executionId} (hookFunctionsPreExecute, nodeExecuteAfter)`,
|
||||
{
|
||||
...err,
|
||||
|
@ -456,6 +459,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
|||
*
|
||||
*/
|
||||
function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
||||
const logger = Container.get(Logger);
|
||||
const internalHooks = Container.get(InternalHooks);
|
||||
const eventsService = Container.get(EventsService);
|
||||
return {
|
||||
|
@ -476,7 +480,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
fullRunData: IRun,
|
||||
newStaticData: IDataObject,
|
||||
): Promise<void> {
|
||||
Logger.debug('Executing hook (hookFunctionsSave)', {
|
||||
logger.debug('Executing hook (hookFunctionsSave)', {
|
||||
executionId: this.executionId,
|
||||
workflowId: this.workflowData.id,
|
||||
});
|
||||
|
@ -497,7 +501,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
);
|
||||
} catch (e) {
|
||||
ErrorReporter.error(e);
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`,
|
||||
{ executionId: this.executionId, workflowId: this.workflowData.id },
|
||||
);
|
||||
|
@ -581,7 +585,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
}
|
||||
} catch (error) {
|
||||
ErrorReporter.error(error);
|
||||
Logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, {
|
||||
logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, {
|
||||
executionId: this.executionId,
|
||||
workflowId: this.workflowData.id,
|
||||
error,
|
||||
|
@ -615,6 +619,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
|
|||
*
|
||||
*/
|
||||
function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
|
||||
const logger = Container.get(Logger);
|
||||
const internalHooks = Container.get(InternalHooks);
|
||||
const eventsService = Container.get(EventsService);
|
||||
return {
|
||||
|
@ -639,7 +644,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
|
|||
fullRunData: IRun,
|
||||
newStaticData: IDataObject,
|
||||
): Promise<void> {
|
||||
Logger.debug('Executing hook (hookFunctionsSaveWorker)', {
|
||||
logger.debug('Executing hook (hookFunctionsSaveWorker)', {
|
||||
executionId: this.executionId,
|
||||
workflowId: this.workflowData.id,
|
||||
});
|
||||
|
@ -653,7 +658,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
|
|||
);
|
||||
} catch (e) {
|
||||
ErrorReporter.error(e);
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`,
|
||||
{ sessionId: this.sessionId, workflowId: this.workflowData.id },
|
||||
);
|
||||
|
@ -986,15 +991,16 @@ async function executeWorkflow(
}

export function setExecutionStatus(status: ExecutionStatus) {
const logger = Container.get(Logger);
if (this.executionId === undefined) {
Logger.debug(`Setting execution status "${status}" failed because executionId is undefined`);
logger.debug(`Setting execution status "${status}" failed because executionId is undefined`);
return;
}
Logger.debug(`Setting execution status for ${this.executionId} to "${status}"`);
logger.debug(`Setting execution status for ${this.executionId} to "${status}"`);
Container.get(ActiveExecutions)
.setStatus(this.executionId, status)
.catch((error) => {
Logger.debug(`Setting execution status "${status}" failed: ${error.message}`);
logger.debug(`Setting execution status "${status}" failed: ${error.message}`);
});
}

@ -1009,7 +1015,8 @@ export function sendDataToUI(type: string, data: IDataObject | IDataObject[]) {
const pushInstance = Container.get(Push);
pushInstance.send(type as IPushDataType, data, sessionId);
} catch (error) {
Logger.warn(`There was a problem sending message to UI: ${error.message}`);
const logger = Container.get(Logger);
logger.warn(`There was a problem sending message to UI: ${error.message}`);
}
}
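`sendDataToUI` only needs the logger on its failure path, so the lookup happens inside the catch block. A reduced sketch of that pattern (the `notifyFrontend` helper is hypothetical):

```ts
import { Container } from 'typedi';
import { Logger } from '@/Logger';

export function notifyFrontend(send: () => void) {
	try {
		send();
	} catch (error) {
		// The container lookup is cheap, so resolving lazily here keeps the happy path untouched.
		Container.get(Logger).warn(`Failed to push message to UI: ${(error as Error).message}`);
	}
}
```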
@ -16,7 +16,6 @@ import type {
|
|||
} from 'n8n-workflow';
|
||||
import {
|
||||
ErrorReporterProxy as ErrorReporter,
|
||||
LoggerProxy as Logger,
|
||||
NodeOperationError,
|
||||
SubworkflowOperationError,
|
||||
Workflow,
|
||||
|
@ -44,6 +43,7 @@ import { RoleService } from './services/role.service';
|
|||
import { ExecutionRepository, RoleRepository } from './databases/repositories';
|
||||
import { VariablesService } from './environments/variables/variables.service';
|
||||
import type { CredentialsEntity } from './databases/entities/CredentialsEntity';
|
||||
import { Logger } from './Logger';
|
||||
|
||||
const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
|
||||
|
||||
|
@ -143,13 +143,14 @@ export async function executeErrorWorkflow(
|
|||
workflowErrorData: IWorkflowErrorData,
|
||||
runningUser: User,
|
||||
): Promise<void> {
|
||||
const logger = Container.get(Logger);
|
||||
// Wrap everything in try/catch to make sure that no errors bubble up and all get caught here
|
||||
try {
|
||||
const workflowData = await Db.collections.Workflow.findOneBy({ id: workflowId });
|
||||
|
||||
if (workflowData === null) {
|
||||
// The error workflow could not be found
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`,
|
||||
{ workflowId },
|
||||
);
|
||||
|
@ -205,7 +206,7 @@ export async function executeErrorWorkflow(
|
|||
|
||||
await Container.get(ExecutionRepository).createNewExecution(fullExecutionData);
|
||||
}
|
||||
Logger.info('Error workflow execution blocked due to subworkflow settings', {
|
||||
logger.info('Error workflow execution blocked due to subworkflow settings', {
|
||||
erroredWorkflowId: workflowErrorData.workflow.id,
|
||||
errorWorkflowId: workflowId,
|
||||
});
|
||||
|
@ -222,7 +223,7 @@ export async function executeErrorWorkflow(
|
|||
}
|
||||
|
||||
if (workflowStartNode === undefined) {
|
||||
Logger.error(
|
||||
logger.error(
|
||||
`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`,
|
||||
);
|
||||
return;
|
||||
|
@ -271,7 +272,7 @@ export async function executeErrorWorkflow(
|
|||
await workflowRunner.run(runData);
|
||||
} catch (error) {
|
||||
ErrorReporter.error(error);
|
||||
Logger.error(
|
||||
logger.error(
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
`Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`,
|
||||
{ workflowId: workflowErrorData.workflow.id },
|
||||
|
@ -533,9 +534,10 @@ export function validateWorkflowCredentialUsage(
|
|||
const isTamperingAttempt = (inaccessibleCredNodeId: string) =>
|
||||
!previouslyExistingNodeIds.includes(inaccessibleCredNodeId);
|
||||
|
||||
const logger = Container.get(Logger);
|
||||
nodesWithCredentialsUserDoesNotHaveAccessTo.forEach((node) => {
|
||||
if (isTamperingAttempt(node.id)) {
|
||||
Logger.verbose('Blocked workflow update due to tampering attempt', {
|
||||
logger.verbose('Blocked workflow update due to tampering attempt', {
|
||||
nodeType: node.type,
|
||||
nodeName: node.name,
|
||||
nodeId: node.id,
|
||||
|
@ -553,7 +555,7 @@ export function validateWorkflowCredentialUsage(
|
|||
(newWorkflowNode) => newWorkflowNode.id === node.id,
|
||||
);
|
||||
|
||||
Logger.debug('Replacing node with previous version when saving updated workflow', {
|
||||
logger.debug('Replacing node with previous version when saving updated workflow', {
|
||||
nodeType: node.type,
|
||||
nodeName: node.name,
|
||||
nodeId: node.id,
@ -18,7 +18,6 @@ import type {
} from 'n8n-workflow';
import {
ErrorReporterProxy as ErrorReporter,
LoggerProxy as Logger,
Workflow,
WorkflowOperationError,
} from 'n8n-workflow';

@ -54,8 +53,11 @@ import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEven
import { Container } from 'typedi';
import { InternalHooks } from './InternalHooks';
import { ExecutionRepository } from '@db/repositories';
import { Logger } from './Logger';

export class WorkflowRunner {
logger: Logger;

activeExecutions: ActiveExecutions;

push: Push;

@ -63,6 +65,7 @@ export class WorkflowRunner {
jobQueue: Queue;

constructor() {
this.logger = Container.get(Logger);
this.push = Container.get(Push);
this.activeExecutions = Container.get(ActiveExecutions);
}
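`WorkflowRunner` is constructed with `new` rather than by the container, so it pulls its dependencies in the constructor instead of declaring them as parameters. A stripped-down sketch of that pattern (class and method names are invented for illustration):

```ts
import { Container } from 'typedi';
import { Logger } from '@/Logger';

class ManualRunner {
	private readonly logger: Logger;

	constructor() {
		// Not @Service-decorated: resolve the shared singletons explicitly.
		this.logger = Container.get(Logger);
	}

	run(executionId: string) {
		this.logger.verbose(`Starting execution ${executionId}`, { executionId });
	}
}

// Usage:
// const runner = new ManualRunner();
// runner.run('123');
```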
@ -298,14 +301,14 @@ export class WorkflowRunner {
|
|||
const executionId = await this.activeExecutions.add(data, undefined, restartExecutionId);
|
||||
additionalData.executionId = executionId;
|
||||
|
||||
Logger.verbose(
|
||||
this.logger.verbose(
|
||||
`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`,
|
||||
{ executionId },
|
||||
);
|
||||
let workflowExecution: PCancelable<IRun>;
|
||||
|
||||
try {
|
||||
Logger.verbose(
|
||||
this.logger.verbose(
|
||||
`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`,
|
||||
{ executionId },
|
||||
);
|
||||
|
@ -349,7 +352,7 @@ export class WorkflowRunner {
|
|||
});
|
||||
|
||||
if (data.executionData !== undefined) {
|
||||
Logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {
|
||||
this.logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {
|
||||
executionId,
|
||||
});
|
||||
const workflowExecute = new WorkflowExecute(
|
||||
|
@ -363,7 +366,9 @@ export class WorkflowRunner {
|
|||
data.startNodes === undefined ||
|
||||
data.startNodes.length === 0
|
||||
) {
|
||||
Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, { executionId });
|
||||
this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {
|
||||
executionId,
|
||||
});
|
||||
// Execute all nodes
|
||||
|
||||
const startNode = WorkflowHelpers.getExecutionStartNode(data, workflow);
|
||||
|
@ -377,7 +382,7 @@ export class WorkflowRunner {
|
|||
data.pinData,
|
||||
);
|
||||
} else {
|
||||
Logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId });
|
||||
this.logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId });
|
||||
// Execute only the nodes between start and destination nodes
|
||||
const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
|
||||
workflowExecution = workflowExecute.runPartialWorkflow(
|
||||
|
@ -576,7 +581,7 @@ export class WorkflowRunner {
|
|||
data.workflowData,
|
||||
{ retryOf: data.retryOf ? data.retryOf.toString() : undefined },
|
||||
);
|
||||
Logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`);
|
||||
this.logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`);
|
||||
if (clearWatchdogInterval !== undefined) {
|
||||
clearWatchdogInterval();
|
||||
}
|
||||
|
@ -590,11 +595,11 @@ export class WorkflowRunner {
|
|||
this.activeExecutions.getPostExecutePromiseCount(executionId) > 0;
|
||||
|
||||
if (executionHasPostExecutionPromises) {
|
||||
Logger.debug(
|
||||
this.logger.debug(
|
||||
`Reading execution data for execution ${executionId} from db for PostExecutionPromise.`,
|
||||
);
|
||||
} else {
|
||||
Logger.debug(
|
||||
this.logger.debug(
|
||||
`Skipping execution data for execution ${executionId} since there are no PostExecutionPromise.`,
|
||||
);
|
||||
}
|
||||
|
@ -737,7 +742,7 @@ export class WorkflowRunner {
|
|||
|
||||
// Listen to data from the subprocess
|
||||
subprocess.on('message', async (message: IProcessMessage) => {
|
||||
Logger.debug(
|
||||
this.logger.debug(
|
||||
`Received child process message of type ${message.type} for execution ID ${executionId}.`,
|
||||
{ executionId },
|
||||
);
|
||||
|
@ -811,7 +816,7 @@ export class WorkflowRunner {
|
|||
// Also get informed when the processes does exit especially when it did crash or timed out
|
||||
subprocess.on('exit', async (code, signal) => {
|
||||
if (signal === 'SIGTERM') {
|
||||
Logger.debug(`Subprocess for execution ID ${executionId} timed out.`, { executionId });
|
||||
this.logger.debug(`Subprocess for execution ID ${executionId} timed out.`, { executionId });
|
||||
// Execution timed out and its process has been terminated
|
||||
const timeoutError = new WorkflowOperationError('Workflow execution timed out!');
|
||||
|
||||
|
@ -823,7 +828,7 @@ export class WorkflowRunner {
|
|||
workflowHooks,
|
||||
);
|
||||
} else if (code !== 0) {
|
||||
Logger.debug(
|
||||
this.logger.debug(
|
||||
`Subprocess for execution ID ${executionId} finished with error code ${code}.`,
|
||||
{ executionId },
|
||||
);
|
||||
|
|
|
@ -17,7 +17,6 @@ import type {
|
|||
IDataObject,
|
||||
IExecuteResponsePromiseData,
|
||||
IExecuteWorkflowInfo,
|
||||
ILogger,
|
||||
INode,
|
||||
INodeExecutionData,
|
||||
IRun,
|
||||
|
@ -30,7 +29,6 @@ import type {
|
|||
} from 'n8n-workflow';
|
||||
import {
|
||||
ErrorReporterProxy as ErrorReporter,
|
||||
LoggerProxy,
|
||||
Workflow,
|
||||
WorkflowHooks,
|
||||
WorkflowOperationError,
|
||||
|
@ -46,7 +44,7 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
|
|||
import * as WebhookHelpers from '@/WebhookHelpers';
|
||||
import * as WorkflowHelpers from '@/WorkflowHelpers';
|
||||
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
|
||||
import { getLogger } from '@/Logger';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
import config from '@/config';
|
||||
import { generateFailedExecutionFromError } from '@/WorkflowHelpers';
|
||||
|
@ -63,7 +61,7 @@ if (process.env.NODEJS_PREFER_IPV4 === 'true') {
|
|||
class WorkflowRunnerProcess {
|
||||
data: IWorkflowExecutionDataProcessWithExecution | undefined;
|
||||
|
||||
logger: ILogger;
|
||||
logger: Logger;
|
||||
|
||||
startedAt = new Date();
|
||||
|
||||
|
@ -85,19 +83,20 @@ class WorkflowRunnerProcess {
}, 30000);
}

constructor() {
this.logger = Container.get(Logger);
}

async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise<IRun> {
process.once('SIGTERM', WorkflowRunnerProcess.stopProcess);
process.once('SIGINT', WorkflowRunnerProcess.stopProcess);

await initErrorHandling();

const logger = (this.logger = getLogger());
LoggerProxy.init(logger);

this.data = inputData;
const { userId } = inputData;

logger.verbose('Initializing n8n sub-process', {
this.logger.verbose('Initializing n8n sub-process', {
pid: process.pid,
workflowId: this.data.workflowData.id,
});
@ -2,10 +2,10 @@ import 'reflect-metadata';
import { Command } from '@oclif/command';
import { ExitError } from '@oclif/errors';
import { Container } from 'typedi';
import { LoggerProxy, ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow';
import { ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow';
import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-core';
import type { AbstractServer } from '@/AbstractServer';
import { getLogger } from '@/Logger';
import { Logger } from '@/Logger';
import config from '@/config';
import * as Db from '@/Db';
import * as CrashJournal from '@/CrashJournal';

@ -24,7 +24,7 @@ import { generateHostInstanceId } from '../databases/utils/generators';
import { WorkflowHistoryManager } from '@/workflows/workflowHistory/workflowHistoryManager.ee';

export abstract class BaseCommand extends Command {
protected logger = LoggerProxy.init(getLogger());
protected logger = Container.get(Logger);

protected externalHooks: IExternalHooksClass;
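Every oclif command extends `BaseCommand`, so the class field above gives all of them a ready `this.logger` with no per-command initialization step. A hypothetical subclass to show the effect:

```ts
import { BaseCommand } from './BaseCommand';

export class Ping extends BaseCommand {
	static description = 'Log a single line and exit';

	async run() {
		this.logger.info('pong');
	}
}
```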
@ -64,12 +64,12 @@ export abstract class BaseCommand extends Command {
|
|||
const dbType = config.getEnv('database.type');
|
||||
|
||||
if (['mysqldb', 'mariadb'].includes(dbType)) {
|
||||
LoggerProxy.warn(
|
||||
this.logger.warn(
|
||||
'Support for MySQL/MariaDB has been deprecated and will be removed with an upcoming version of n8n. Please migrate to PostgreSQL.',
|
||||
);
|
||||
}
|
||||
if (process.env.EXECUTIONS_PROCESS === 'own') {
|
||||
LoggerProxy.warn(
|
||||
this.logger.warn(
|
||||
'Own mode has been deprecated and will be removed in a future version of n8n. If you need the isolation and performance gains, please consider using queue mode.',
|
||||
);
|
||||
}
|
||||
|
@ -129,7 +129,7 @@ export abstract class BaseCommand extends Command {
|
|||
const isLicensed = Container.get(License).isFeatureEnabled(LICENSE_FEATURES.BINARY_DATA_S3);
|
||||
|
||||
if (isSelected && isAvailable && isLicensed) {
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
'License found for external storage - object store to init in read-write mode',
|
||||
);
|
||||
|
||||
|
@ -139,7 +139,7 @@ export abstract class BaseCommand extends Command {
|
|||
}
|
||||
|
||||
if (isSelected && isAvailable && !isLicensed) {
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
'No license found for external storage - object store to init with writes blocked. To enable writes, please upgrade to a license that supports this feature.',
|
||||
);
|
||||
|
||||
|
@ -149,7 +149,7 @@ export abstract class BaseCommand extends Command {
|
|||
}
|
||||
|
||||
if (!isSelected && isAvailable) {
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
'External storage unselected but available - object store to init with writes unused',
|
||||
);
|
||||
|
||||
|
@ -204,17 +204,17 @@ export abstract class BaseCommand extends Command {
|
|||
);
|
||||
}
|
||||
|
||||
LoggerProxy.debug('Initializing object store service');
|
||||
this.logger.debug('Initializing object store service');
|
||||
|
||||
try {
|
||||
await objectStoreService.init(host, bucket, credentials);
|
||||
objectStoreService.setReadonly(options.isReadOnly);
|
||||
|
||||
LoggerProxy.debug('Object store init completed');
|
||||
this.logger.debug('Object store init completed');
|
||||
} catch (e) {
|
||||
const error = e instanceof Error ? e : new Error(`${e}`);
|
||||
|
||||
LoggerProxy.debug('Object store init failed', { error });
|
||||
this.logger.debug('Object store init failed', { error });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,7 +223,7 @@ export abstract class BaseCommand extends Command {
|
|||
await this.initObjectStoreService();
|
||||
} catch (e) {
|
||||
const error = e instanceof Error ? e : new Error(`${e}`);
|
||||
LoggerProxy.error(`Failed to init object store: ${error.message}`, { error });
|
||||
this.logger.error(`Failed to init object store: ${error.message}`, { error });
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
@ -246,14 +246,14 @@ export abstract class BaseCommand extends Command {
|
|||
const hasCert = (await license.loadCertStr()).length > 0;
|
||||
|
||||
if (hasCert) {
|
||||
return LoggerProxy.debug('Skipping license activation');
|
||||
return this.logger.debug('Skipping license activation');
|
||||
}
|
||||
|
||||
try {
|
||||
LoggerProxy.debug('Attempting license activation');
|
||||
this.logger.debug('Attempting license activation');
|
||||
await license.activate(activationKey);
|
||||
} catch (e) {
|
||||
LoggerProxy.error('Could not activate license', e as Error);
|
||||
this.logger.error('Could not activate license', e as Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
import { Command, flags } from '@oclif/command';
import type { DataSourceOptions as ConnectionOptions } from 'typeorm';
import { DataSource as Connection } from 'typeorm';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
import { Container } from 'typedi';
import { Logger } from '@/Logger';
import { getConnectionOptions } from '@/Db';
import type { Migration } from '@db/types';
import { wrapMigration } from '@db/utils/migrationHelpers';

@ -17,7 +17,7 @@ export class DbRevertMigrationCommand extends Command {
help: flags.help({ char: 'h' }),
};

protected logger = LoggerProxy.init(getLogger());
protected logger = Container.get(Logger);

private connection: Connection;
@ -1,5 +1,5 @@
import { flags } from '@oclif/command';
import { LoggerProxy, sleep } from 'n8n-workflow';
import { sleep } from 'n8n-workflow';
import config from '@/config';
import { ActiveExecutions } from '@/ActiveExecutions';
import { WebhookServer } from '@/WebhookServer';

@ -36,7 +36,7 @@ export class Webhook extends BaseCommand {
* get removed.
*/
async stopProcess() {
LoggerProxy.info('\nStopping n8n...');
this.logger.info('\nStopping n8n...');

try {
await this.externalHooks.run('n8n.stop', []);

@ -54,7 +54,7 @@ export class Webhook extends BaseCommand {
let count = 0;
while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) {
LoggerProxy.info(
this.logger.info(
`Waiting for ${executingWorkflows.length} active executions to finish...`,
);
}
@ -13,7 +13,7 @@ import type {
|
|||
INodeTypes,
|
||||
IRun,
|
||||
} from 'n8n-workflow';
|
||||
import { Workflow, NodeOperationError, LoggerProxy, sleep } from 'n8n-workflow';
|
||||
import { Workflow, NodeOperationError, sleep } from 'n8n-workflow';
|
||||
|
||||
import * as Db from '@/Db';
|
||||
import * as ResponseHelper from '@/ResponseHelper';
|
||||
|
@ -71,7 +71,7 @@ export class Worker extends BaseCommand {
|
|||
* get removed.
|
||||
*/
|
||||
async stopProcess() {
|
||||
LoggerProxy.info('Stopping n8n...');
|
||||
this.logger.info('Stopping n8n...');
|
||||
|
||||
// Stop accepting new jobs
|
||||
await Worker.jobQueue.pause(true);
|
||||
|
@ -94,7 +94,7 @@ export class Worker extends BaseCommand {
|
|||
while (Object.keys(Worker.runningJobs).length !== 0) {
|
||||
if (count++ % 4 === 0) {
|
||||
const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000);
|
||||
LoggerProxy.info(
|
||||
this.logger.info(
|
||||
`Waiting for ${
|
||||
Object.keys(Worker.runningJobs).length
|
||||
} active executions to finish... (wait ${waitLeft} more seconds)`,
|
||||
|
@ -121,7 +121,7 @@ export class Worker extends BaseCommand {
|
|||
);
|
||||
|
||||
if (!fullExecutionData) {
|
||||
LoggerProxy.error(
|
||||
this.logger.error(
|
||||
`Worker failed to find data of execution "${executionId}" in database. Cannot continue.`,
|
||||
{ executionId },
|
||||
);
|
||||
|
@ -130,7 +130,7 @@ export class Worker extends BaseCommand {
|
|||
);
|
||||
}
|
||||
const workflowId = fullExecutionData.workflowData.id!;
|
||||
LoggerProxy.info(
|
||||
this.logger.info(
|
||||
`Start job: ${job.id} (Workflow ID: ${workflowId} | Execution: ${executionId})`,
|
||||
);
|
||||
|
||||
|
@ -145,7 +145,7 @@ export class Worker extends BaseCommand {
|
|||
},
|
||||
});
|
||||
if (workflowData === null) {
|
||||
LoggerProxy.error(
|
||||
this.logger.error(
|
||||
'Worker execution failed because workflow could not be found in database.',
|
||||
{ workflowId, executionId },
|
||||
);
|
||||
|
@ -217,7 +217,7 @@ export class Worker extends BaseCommand {
|
|||
|
||||
additionalData.setExecutionStatus = (status: ExecutionStatus) => {
|
||||
// Can't set the status directly in the queued worker, but it will happen in InternalHook.onWorkflowPostExecute
|
||||
LoggerProxy.debug(`Queued worker execution status for ${executionId} is "${status}"`);
|
||||
this.logger.debug(`Queued worker execution status for ${executionId} is "${status}"`);
|
||||
};
|
||||
|
||||
let workflowExecute: WorkflowExecute;
|
||||
|
@ -400,7 +400,7 @@ export class Worker extends BaseCommand {
|
|||
'/healthz',
|
||||
|
||||
async (req: express.Request, res: express.Response) => {
|
||||
LoggerProxy.debug('Health check started!');
|
||||
this.logger.debug('Health check started!');
|
||||
|
||||
const connection = Db.getConnection();
|
||||
|
||||
|
@ -412,7 +412,7 @@ export class Worker extends BaseCommand {
|
|||
// DB ping
|
||||
await connection.query('SELECT 1');
|
||||
} catch (e) {
|
||||
LoggerProxy.error('No Database connection!', e as Error);
|
||||
this.logger.error('No Database connection!', e as Error);
|
||||
const error = new ResponseHelper.ServiceUnavailableError('No Database connection!');
|
||||
return ResponseHelper.sendErrorResponse(res, error);
|
||||
}
|
||||
|
@ -423,7 +423,7 @@ export class Worker extends BaseCommand {
|
|||
// Redis ping
|
||||
await Worker.jobQueue.ping();
|
||||
} catch (e) {
|
||||
LoggerProxy.error('No Redis connection!', e as Error);
|
||||
this.logger.error('No Redis connection!', e as Error);
|
||||
const error = new ResponseHelper.ServiceUnavailableError('No Redis connection!');
|
||||
return ResponseHelper.sendErrorResponse(res, error);
|
||||
}
|
||||
|
@ -433,7 +433,7 @@ export class Worker extends BaseCommand {
|
|||
status: 'ok',
|
||||
};
|
||||
|
||||
LoggerProxy.debug('Health check completed successfully!');
|
||||
this.logger.debug('Health check completed successfully!');
|
||||
|
||||
ResponseHelper.sendSuccessResponse(res, responseData, true, 200);
|
||||
},
|
||||
@ -2,7 +2,7 @@ import path from 'path';
import convict from 'convict';
import { Container } from 'typedi';
import { InstanceSettings } from 'n8n-core';
import { jsonParse } from 'n8n-workflow';
import { LOG_LEVELS, jsonParse } from 'n8n-workflow';
import { ensureStringArray } from './utils';

convict.addFormat({

@ -856,7 +856,7 @@ export const schema = {
logs: {
level: {
doc: 'Log output level',
format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const,
format: LOG_LEVELS,
default: 'info',
env: 'N8N_LOG_LEVEL',
},
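The config hunk swaps the inline level list for `LOG_LEVELS` from `n8n-workflow`, so the convict format and the logger share one source of truth. Assuming it is exported as a readonly tuple of level names, it would look roughly like this:

```ts
export const LOG_LEVELS = ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const;
export type LogLevel = (typeof LOG_LEVELS)[number]; // 'error' | 'warn' | 'info' | ...
```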
@ -1,6 +1,6 @@
|
|||
import validator from 'validator';
|
||||
import { In } from 'typeorm';
|
||||
import { Container } from 'typedi';
|
||||
import { Service } from 'typedi';
|
||||
import { Authorized, Get, Post, RestController } from '@/decorators';
|
||||
import {
|
||||
AuthError,
|
||||
|
@ -11,12 +11,10 @@ import {
|
|||
import { issueCookie, resolveJwt } from '@/auth/jwt';
|
||||
import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES } from '@/constants';
|
||||
import { Request, Response } from 'express';
|
||||
import { ILogger } from 'n8n-workflow';
|
||||
import type { User } from '@db/entities/User';
|
||||
import { LoginRequest, UserRequest } from '@/requests';
|
||||
import type { PublicUser } from '@/Interfaces';
|
||||
import { Config } from '@/config';
|
||||
import { IInternalHooksClass } from '@/Interfaces';
|
||||
import config from '@/config';
|
||||
import { handleEmailLogin, handleLdapLogin } from '@/auth';
|
||||
import { PostHogClient } from '@/posthog';
|
||||
import {
|
||||
|
@ -28,15 +26,17 @@ import { InternalHooks } from '../InternalHooks';
import { License } from '@/License';
import { UserService } from '@/services/user.service';
import { MfaService } from '@/Mfa/mfa.service';
import { Logger } from '@/Logger';

@Service()
@RestController()
export class AuthController {
constructor(
private readonly config: Config,
private readonly logger: ILogger,
private readonly internalHooks: IInternalHooksClass,
private readonly logger: Logger,
private readonly internalHooks: InternalHooks,
private readonly mfaService: MfaService,
private readonly userService: UserService,
private readonly license: License,
private readonly postHog?: PostHogClient,
) {}
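Controllers such as `AuthController` are decorated with `@Service()`, so typedi can construct them and supply the concrete `Logger` through the constructor instead of the old `ILogger` interface. A cut-down sketch of that injection style (controller and method are hypothetical):

```ts
import { Service } from 'typedi';
import { Logger } from '@/Logger';

@Service()
export class ExampleController {
	constructor(private readonly logger: Logger) {}

	handleLogin(email: string) {
		this.logger.debug('Login attempt', { email });
	}
}
```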
@ -93,14 +93,14 @@ export class AuthController {
|
|||
}
|
||||
|
||||
await issueCookie(res, user);
|
||||
void Container.get(InternalHooks).onUserLoginSuccess({
|
||||
void this.internalHooks.onUserLoginSuccess({
|
||||
user,
|
||||
authenticationMethod: usedAuthenticationMethod,
|
||||
});
|
||||
|
||||
return this.userService.toPublic(user, { posthog: this.postHog });
|
||||
}
|
||||
void Container.get(InternalHooks).onUserLoginFailed({
|
||||
void this.internalHooks.onUserLoginFailed({
|
||||
user: email,
|
||||
authenticationMethod: usedAuthenticationMethod,
|
||||
reason: 'wrong credentials',
|
||||
|
@ -129,7 +129,7 @@ export class AuthController {
|
|||
}
|
||||
}
|
||||
|
||||
if (this.config.getEnv('userManagement.isInstanceOwnerSetUp')) {
|
||||
if (config.getEnv('userManagement.isInstanceOwnerSetUp')) {
|
||||
throw new AuthError('Not logged in');
|
||||
}
|
||||
|
||||
|
@ -155,7 +155,7 @@ export class AuthController {
|
|||
@Get('/resolve-signup-token')
|
||||
async resolveSignupToken(req: UserRequest.ResolveSignUp) {
|
||||
const { inviterId, inviteeId } = req.query;
|
||||
const isWithinUsersLimit = Container.get(License).isWithinUsersLimit();
|
||||
const isWithinUsersLimit = this.license.isWithinUsersLimit();
|
||||
|
||||
if (!isWithinUsersLimit) {
|
||||
this.logger.debug('Request to resolve signup token failed because of users quota reached', {
|
||||
|
|
|
@ -1,32 +1,35 @@
|
|||
import validator from 'validator';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import { Response } from 'express';
|
||||
import { Service } from 'typedi';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { Authorized, Delete, Get, Patch, Post, RestController } from '@/decorators';
|
||||
import { compareHash, hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper';
|
||||
import { BadRequestError } from '@/ResponseHelper';
|
||||
import { validateEntity } from '@/GenericHelpers';
|
||||
import { issueCookie } from '@/auth/jwt';
|
||||
import type { User } from '@db/entities/User';
|
||||
import { Response } from 'express';
|
||||
import { ILogger } from 'n8n-workflow';
|
||||
import {
|
||||
AuthenticatedRequest,
|
||||
MeRequest,
|
||||
UserSettingsUpdatePayload,
|
||||
UserUpdatePayload,
|
||||
} from '@/requests';
|
||||
import { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces';
|
||||
import type { PublicUser } from '@/Interfaces';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { isSamlLicensedAndEnabled } from '../sso/saml/samlHelpers';
|
||||
import { UserService } from '@/services/user.service';
|
||||
import { Logger } from '@/Logger';
|
||||
import { ExternalHooks } from '@/ExternalHooks';
|
||||
import { InternalHooks } from '@/InternalHooks';
|
||||
|
||||
@Service()
|
||||
@Authorized()
|
||||
@RestController('/me')
|
||||
export class MeController {
|
||||
constructor(
|
||||
private readonly logger: ILogger,
|
||||
private readonly externalHooks: IExternalHooksClass,
|
||||
private readonly internalHooks: IInternalHooksClass,
|
||||
private readonly logger: Logger,
|
||||
private readonly externalHooks: ExternalHooks,
|
||||
private readonly internalHooks: InternalHooks,
|
||||
private readonly userService: UserService,
|
||||
) {}
|
||||
|
||||
|
|
|
@ -5,20 +5,20 @@ import { BadRequestError } from '@/ResponseHelper';
|
|||
import { hashPassword, validatePassword } from '@/UserManagement/UserManagementHelper';
|
||||
import { issueCookie } from '@/auth/jwt';
|
||||
import { Response } from 'express';
|
||||
import { ILogger } from 'n8n-workflow';
|
||||
import { Config } from '@/config';
|
||||
import { OwnerRequest } from '@/requests';
|
||||
import { IInternalHooksClass } from '@/Interfaces';
|
||||
import { SettingsRepository } from '@db/repositories';
|
||||
import { PostHogClient } from '@/posthog';
|
||||
import { UserService } from '@/services/user.service';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Authorized(['global', 'owner'])
|
||||
@RestController('/owner')
|
||||
export class OwnerController {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly logger: ILogger,
|
||||
private readonly logger: Logger,
|
||||
private readonly internalHooks: IInternalHooksClass,
|
||||
private readonly settingsRepository: SettingsRepository,
|
||||
private readonly userService: UserService,
|
||||
|
|
|
@ -16,7 +16,6 @@ import {
|
|||
import { UserManagementMailer } from '@/UserManagement/email';
|
||||
|
||||
import { Response } from 'express';
|
||||
import { ILogger } from 'n8n-workflow';
|
||||
import { PasswordResetRequest } from '@/requests';
|
||||
import { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces';
|
||||
import { issueCookie } from '@/auth/jwt';
|
||||
|
@ -30,11 +29,12 @@ import { TokenExpiredError } from 'jsonwebtoken';
|
|||
import type { JwtPayload } from '@/services/jwt.service';
|
||||
import { JwtService } from '@/services/jwt.service';
|
||||
import { MfaService } from '@/Mfa/mfa.service';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@RestController()
|
||||
export class PasswordResetController {
|
||||
constructor(
|
||||
private readonly logger: ILogger,
|
||||
private readonly logger: Logger,
|
||||
private readonly externalHooks: IExternalHooksClass,
|
||||
private readonly internalHooks: IInternalHooksClass,
|
||||
private readonly mailer: UserManagementMailer,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import validator from 'validator';
|
||||
import type { FindManyOptions } from 'typeorm';
|
||||
import { In, Not } from 'typeorm';
|
||||
import { ILogger, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
|
||||
import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
|
||||
import { User } from '@db/entities/User';
|
||||
import { SharedCredentials } from '@db/entities/SharedCredentials';
|
||||
import { SharedWorkflow } from '@db/entities/SharedWorkflow';
|
||||
|
@ -38,13 +38,14 @@ import { JwtService } from '@/services/jwt.service';
|
|||
import { RoleService } from '@/services/role.service';
|
||||
import { UserService } from '@/services/user.service';
|
||||
import { listQueryMiddleware } from '@/middlewares';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Authorized(['global', 'owner'])
|
||||
@RestController('/users')
|
||||
export class UsersController {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly logger: ILogger,
|
||||
private readonly logger: Logger,
|
||||
private readonly externalHooks: IExternalHooksClass,
|
||||
private readonly internalHooks: IInternalHooksClass,
|
||||
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import { Service } from 'typedi';
|
||||
import { Response, NextFunction } from 'express';
|
||||
import { ILogger } from 'n8n-workflow';
|
||||
import { Get, Middleware, RestController } from '@/decorators';
|
||||
import type { WorkflowStatistics } from '@db/entities/WorkflowStatistics';
|
||||
import { StatisticsNames } from '@db/entities/WorkflowStatistics';
|
||||
|
@ -9,6 +8,7 @@ import { ExecutionRequest } from '@/requests';
|
|||
import { whereClause } from '@/UserManagement/UserManagementHelper';
|
||||
import { NotFoundError } from '@/ResponseHelper';
|
||||
import type { IWorkflowStatisticsDataLoaded } from '@/Interfaces';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
interface WorkflowStatisticsData<T> {
|
||||
productionSuccess: T;
|
||||
|
@ -23,7 +23,7 @@ export class WorkflowStatisticsController {
|
|||
constructor(
|
||||
private sharedWorkflowRepository: SharedWorkflowRepository,
|
||||
private workflowStatisticsRepository: WorkflowStatisticsRepository,
|
||||
private readonly logger: ILogger,
|
||||
private readonly logger: Logger,
|
||||
) {}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,11 +1,10 @@
import express from 'express';
import type { INodeCredentialTestResult } from 'n8n-workflow';
import { deepCopy, LoggerProxy } from 'n8n-workflow';
import { deepCopy } from 'n8n-workflow';

import * as GenericHelpers from '@/GenericHelpers';
import * as ResponseHelper from '@/ResponseHelper';
import config from '@/config';
import { getLogger } from '@/Logger';
import { EECredentialsController } from './credentials.controller.ee';
import { CredentialsService } from './credentials.service';

@ -14,21 +13,9 @@ import type { CredentialRequest, ListQuery } from '@/requests';
import { Container } from 'typedi';
import { InternalHooks } from '@/InternalHooks';
import { listQueryMiddleware } from '@/middlewares';
import { Logger } from '@/Logger';

export const credentialsController = express.Router();

/**
 * Initialize Logger if needed
 */
credentialsController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});

credentialsController.use('/', EECredentialsController);

/**
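With the logger registered in the container there is no initialization-order concern, which is why the "Initialize Logger if needed" middleware above can be deleted outright: route handlers can call `Container.get(Logger)` directly. An illustrative handler (route and router are made up):

```ts
import express from 'express';
import { Container } from 'typedi';
import { Logger } from '@/Logger';

const router = express.Router();

router.get('/health', (_req, res) => {
	Container.get(Logger).debug('Health endpoint hit');
	res.send({ status: 'ok' });
});
```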
@ -151,10 +138,13 @@ credentialsController.patch(
|
|||
const sharing = await CredentialsService.getSharing(req.user, credentialId);
|
||||
|
||||
if (!sharing) {
|
||||
LoggerProxy.info('Attempt to update credential blocked due to lack of permissions', {
|
||||
credentialId,
|
||||
userId: req.user.id,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to update credential blocked due to lack of permissions',
|
||||
{
|
||||
credentialId,
|
||||
userId: req.user.id,
|
||||
},
|
||||
);
|
||||
throw new ResponseHelper.NotFoundError(
|
||||
'Credential to be updated not found. You can only update credentials owned by you',
|
||||
);
|
||||
|
@ -183,7 +173,7 @@ credentialsController.patch(
|
|||
// Remove the encrypted data as it is not needed in the frontend
|
||||
const { data: _, ...rest } = responseData;
|
||||
|
||||
LoggerProxy.verbose('Credential updated', { credentialId });
|
||||
Container.get(Logger).verbose('Credential updated', { credentialId });
|
||||
|
||||
return { ...rest };
|
||||
}),
|
||||
|
@ -200,10 +190,13 @@ credentialsController.delete(
|
|||
const sharing = await CredentialsService.getSharing(req.user, credentialId);
|
||||
|
||||
if (!sharing) {
|
||||
LoggerProxy.info('Attempt to delete credential blocked due to lack of permissions', {
|
||||
credentialId,
|
||||
userId: req.user.id,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to delete credential blocked due to lack of permissions',
|
||||
{
|
||||
credentialId,
|
||||
userId: req.user.id,
|
||||
},
|
||||
);
|
||||
throw new ResponseHelper.NotFoundError(
|
||||
'Credential to be deleted not found. You can only removed credentials owned by you',
|
||||
);
|
||||
|
|
|
@ -6,7 +6,7 @@ import type {
|
|||
INodeCredentialTestResult,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
import { CREDENTIAL_EMPTY_VALUE, deepCopy, LoggerProxy, NodeHelpers } from 'n8n-workflow';
|
||||
import { CREDENTIAL_EMPTY_VALUE, deepCopy, NodeHelpers } from 'n8n-workflow';
|
||||
import { Container } from 'typedi';
|
||||
import type { FindManyOptions, FindOptionsWhere } from 'typeorm';
|
||||
import { In, Like } from 'typeorm';
|
||||
|
@ -24,6 +24,7 @@ import type { CredentialRequest, ListQuery } from '@/requests';
|
|||
import { CredentialTypes } from '@/CredentialTypes';
|
||||
import { RoleService } from '@/services/role.service';
|
||||
import { OwnershipService } from '@/services/ownership.service';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export class CredentialsService {
|
||||
static async get(
|
||||
|
@ -270,7 +271,7 @@ export class CredentialsService {
|
|||
|
||||
return savedCredential;
|
||||
});
|
||||
LoggerProxy.verbose('New credential created', {
|
||||
Container.get(Logger).verbose('New credential created', {
|
||||
credentialId: newCredential.id,
|
||||
ownerId: user.id,
|
||||
});
|
||||
|
|
|
@ -11,7 +11,7 @@ import split from 'lodash/split';
|
|||
import unset from 'lodash/unset';
|
||||
import { Credentials } from 'n8n-core';
|
||||
import type { WorkflowExecuteMode, INodeCredentialsDetails } from 'n8n-workflow';
|
||||
import { LoggerProxy, jsonStringify } from 'n8n-workflow';
|
||||
import { jsonStringify } from 'n8n-workflow';
|
||||
import { resolve as pathResolve } from 'path';
|
||||
|
||||
import * as Db from '@/Db';
|
||||
|
@ -23,7 +23,6 @@ import {
|
|||
getCredentialForUser,
|
||||
getCredentialWithoutUser,
|
||||
} from '@/CredentialsHelper';
|
||||
import { getLogger } from '@/Logger';
|
||||
import type { OAuthRequest } from '@/requests';
|
||||
import { ExternalHooks } from '@/ExternalHooks';
|
||||
import config from '@/config';
|
||||
|
@ -31,21 +30,10 @@ import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
|
|||
import { Container } from 'typedi';
|
||||
|
||||
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export const oauth2CredentialController = express.Router();
|
||||
|
||||
/**
|
||||
* Initialize Logger if needed
|
||||
*/
|
||||
oauth2CredentialController.use((req, res, next) => {
|
||||
try {
|
||||
LoggerProxy.getInstance();
|
||||
} catch (error) {
|
||||
LoggerProxy.init(getLogger());
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
const restEndpoint = config.getEnv('endpoints.rest');
|
||||
|
||||
/**
|
||||
|
@ -65,7 +53,7 @@ oauth2CredentialController.get(
|
|||
const credential = await getCredentialForUser(credentialId, req.user);
|
||||
|
||||
if (!credential) {
|
||||
LoggerProxy.error('Failed to authorize OAuth2 due to lack of permissions', {
|
||||
Container.get(Logger).error('Failed to authorize OAuth2 due to lack of permissions', {
|
||||
userId: req.user.id,
|
||||
credentialId,
|
||||
});
|
||||
|
@ -164,7 +152,7 @@ oauth2CredentialController.get(
|
|||
// Update the credentials in DB
|
||||
await Db.collections.Credentials.update(req.query.id, newCredentialsData);
|
||||
|
||||
LoggerProxy.verbose('OAuth2 authorization url created for credential', {
|
||||
Container.get(Logger).verbose('OAuth2 authorization url created for credential', {
|
||||
userId: req.user.id,
|
||||
credentialId,
|
||||
});
|
||||
|
@ -210,7 +198,7 @@ oauth2CredentialController.get(
|
|||
|
||||
if (!credential) {
|
||||
const errorMessage = 'OAuth2 callback failed because of insufficient permissions';
|
||||
LoggerProxy.error(errorMessage, {
|
||||
Container.get(Logger).error(errorMessage, {
|
||||
userId: req.user?.id,
|
||||
credentialId: state.cid,
|
||||
});
|
||||
|
@ -244,7 +232,7 @@ oauth2CredentialController.get(
|
|||
!token.verify(decryptedDataOriginal.csrfSecret as string, state.token)
|
||||
) {
|
||||
const errorMessage = 'The OAuth2 callback state is invalid!';
|
||||
LoggerProxy.debug(errorMessage, {
|
||||
Container.get(Logger).debug(errorMessage, {
|
||||
userId: req.user?.id,
|
||||
credentialId: state.cid,
|
||||
});
|
||||
|
@ -298,7 +286,7 @@ oauth2CredentialController.get(
|
|||
|
||||
if (oauthToken === undefined) {
|
||||
const errorMessage = 'Unable to get OAuth2 access tokens!';
|
||||
LoggerProxy.error(errorMessage, {
|
||||
Container.get(Logger).error(errorMessage, {
|
||||
userId: req.user?.id,
|
||||
credentialId: state.cid,
|
||||
});
|
||||
|
@ -327,7 +315,7 @@ oauth2CredentialController.get(
|
|||
newCredentialsData.updatedAt = new Date();
|
||||
// Save the credentials in DB
|
||||
await Db.collections.Credentials.update(state.cid, newCredentialsData);
|
||||
LoggerProxy.verbose('OAuth2 callback successful for new credential', {
|
||||
Container.get(Logger).verbose('OAuth2 callback successful for new credential', {
|
||||
userId: req.user?.id,
|
||||
credentialId: state.cid,
|
||||
});
|
||||
|
|
|
@ -17,7 +17,6 @@ import type {
SelectQueryBuilder,
} from 'typeorm';
import { parse, stringify } from 'flatted';
import { LoggerProxy as Logger } from 'n8n-workflow';
import type { IExecutionsSummary, IRunExecutionData } from 'n8n-workflow';
import { BinaryDataService } from 'n8n-core';
import type {

@ -35,6 +34,7 @@ import { ExecutionEntity } from '../entities/ExecutionEntity';
import { ExecutionMetadata } from '../entities/ExecutionMetadata';
import { ExecutionDataRepository } from './executionData.repository';
import { TIME, inTest } from '@/constants';
import { Logger } from '@/Logger';

function parseFiltersToQueryBuilder(
qb: SelectQueryBuilder<ExecutionEntity>,

@ -77,8 +77,6 @@ function parseFiltersToQueryBuilder(

@Service()
export class ExecutionRepository extends Repository<ExecutionEntity> {
private logger = Logger;

private hardDeletionBatchSize = 100;

private rates: Record<string, number> = {

@ -96,6 +94,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {

constructor(
dataSource: DataSource,
private readonly logger: Logger,
private readonly executionDataRepository: ExecutionDataRepository,
private readonly binaryDataService: BinaryDataService,
) {
@ -360,7 +359,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
|
|||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
Logger.warn(`Failed to get executions count from Postgres: ${error.message}`, {
|
||||
this.logger.warn(`Failed to get executions count from Postgres: ${error.message}`, {
|
||||
error,
|
||||
});
|
||||
}
|
||||
|
@ -461,7 +460,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
|
|||
|
||||
if (!executions.length) {
|
||||
if (deleteConditions.ids) {
|
||||
Logger.error('Failed to delete an execution due to insufficient permissions', {
|
||||
this.logger.error('Failed to delete an execution due to insufficient permissions', {
|
||||
executionIds: deleteConditions.ids,
|
||||
});
|
||||
}
|
||||
|
@ -480,7 +479,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
|
|||
* Mark executions as deleted based on age and count, in a pruning cycle.
|
||||
*/
|
||||
async softDeleteOnPruningCycle() {
|
||||
Logger.debug('Starting soft-deletion of executions (pruning cycle)');
|
||||
this.logger.debug('Starting soft-deletion of executions (pruning cycle)');
|
||||
|
||||
const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
|
||||
const maxCount = config.getEnv('executions.pruneDataMaxCount');
|
||||
|
@ -527,7 +526,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
|
|||
.execute();
|
||||
|
||||
if (result.affected === 0) {
|
||||
Logger.debug('Found no executions to soft-delete (pruning cycle)');
|
||||
this.logger.debug('Found no executions to soft-delete (pruning cycle)');
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -8,10 +8,8 @@ import config from '@/config';
import { inTest } from '@/constants';
import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types';
import { createSchemaBuilder } from '@db/dsl';
import { getLogger } from '@/Logger';
import { NodeTypes } from '@/NodeTypes';

const logger = getLogger();
import { Logger } from '@/Logger';

const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';

@ -48,6 +46,7 @@ let runningMigrations = false;
function logMigrationStart(migrationName: string): void {
if (inTest) return;

const logger = Container.get(Logger);
if (!runningMigrations) {
logger.warn('Migrations in progress, please do NOT stop the process.');
runningMigrations = true;

@ -59,6 +58,7 @@ function logMigrationStart(migrationName: string): void {
function logMigrationEnd(migrationName: string): void {
if (inTest) return;

const logger = Container.get(Logger);
logger.debug(`Finished migration ${migrationName}`);
}

@ -94,7 +94,7 @@ const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbTy
const tablePrefix = config.getEnv('database.tablePrefix');

const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({
logger,
logger: Container.get(Logger),
tablePrefix,
dbType,
isMysql,
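Dropping the module-level `const logger = getLogger()` defers the lookup until a migration actually runs, which avoids touching the logger before the container and its configuration are ready. A sketch in the same spirit; the helper name is hypothetical:

```ts
import { Container } from 'typedi';
import { Logger } from '@/Logger';

export function logMigrationSkipped(migrationName: string): void {
	// Resolved at call time, mirroring logMigrationStart/logMigrationEnd above.
	Container.get(Logger).debug(`Skipping migration ${migrationName}`);
}
```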
@ -1,3 +1,6 @@
|
|||
import { Container, Service } from 'typedi';
|
||||
import type { PullResult } from 'simple-git';
|
||||
import express from 'express';
|
||||
import { Authorized, Get, Post, Patch, RestController } from '@/decorators';
|
||||
import {
|
||||
sourceControlLicensedMiddleware,
|
||||
|
@ -10,10 +13,7 @@ import type { SourceControlPreferences } from './types/sourceControlPreferences'
|
|||
import type { SourceControlledFile } from './types/sourceControlledFile';
|
||||
import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants';
|
||||
import { BadRequestError } from '@/ResponseHelper';
|
||||
import type { PullResult } from 'simple-git';
|
||||
import express from 'express';
|
||||
import type { ImportResult } from './types/importResult';
|
||||
import Container, { Service } from 'typedi';
|
||||
import { InternalHooks } from '../../InternalHooks';
|
||||
import { getRepoType } from './sourceControlHelper.ee';
|
||||
import { SourceControlGetStatus } from './types/sourceControlGetStatus';
|
||||
|
|
|
@ -14,7 +14,6 @@ import {
|
|||
SOURCE_CONTROL_DEFAULT_NAME,
|
||||
SOURCE_CONTROL_README,
|
||||
} from './constants';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import { SourceControlGitService } from './sourceControlGit.service.ee';
|
||||
import type { PushResult } from 'simple-git';
|
||||
import { SourceControlExportService } from './sourceControlExport.service.ee';
|
||||
|
@ -35,6 +34,7 @@ import type { SourceControlWorkflowVersionId } from './types/sourceControlWorkfl
|
|||
import type { ExportableCredential } from './types/exportableCredential';
|
||||
import { InternalHooks } from '@/InternalHooks';
|
||||
import { TagRepository } from '@/databases/repositories';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Service()
|
||||
export class SourceControlService {
|
||||
|
@ -45,6 +45,7 @@ export class SourceControlService {
|
|||
private gitFolder: string;
|
||||
|
||||
constructor(
|
||||
private readonly logger: Logger,
|
||||
private gitService: SourceControlGitService,
|
||||
private sourceControlPreferencesService: SourceControlPreferencesService,
|
||||
private sourceControlExportService: SourceControlExportService,
|
||||
|
@ -123,14 +124,14 @@ export class SourceControlService {
|
|||
if (!this.gitService.git) {
|
||||
await this.initGitService();
|
||||
}
|
||||
LoggerProxy.debug('Initializing repository...');
|
||||
this.logger.debug('Initializing repository...');
|
||||
await this.gitService.initRepository(preferences, user);
|
||||
let getBranchesResult;
|
||||
try {
|
||||
getBranchesResult = await this.getBranches();
|
||||
} catch (error) {
|
||||
if ((error as Error).message.includes('Warning: Permanently added')) {
|
||||
LoggerProxy.debug('Added repository host to the list of known hosts. Retrying...');
|
||||
this.logger.debug('Added repository host to the list of known hosts. Retrying...');
|
||||
getBranchesResult = await this.getBranches();
|
||||
} else {
|
||||
throw error;
|
||||
|
@ -152,7 +153,7 @@ export class SourceControlService {
|
|||
getBranchesResult = await this.getBranches();
|
||||
await this.gitService.setBranch(preferences.branchName);
|
||||
} catch (fileError) {
|
||||
LoggerProxy.error(`Failed to create initial commit: ${(fileError as Error).message}`);
|
||||
this.logger.error(`Failed to create initial commit: ${(fileError as Error).message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -193,7 +194,7 @@ export class SourceControlService {
|
|||
await this.gitService.resetBranch();
|
||||
await this.gitService.pull();
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to reset workfolder: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to reset workfolder: ${(error as Error).message}`);
|
||||
throw new Error(
|
||||
'Unable to fetch updates from git - your folder might be out of sync. Try reconnecting from the Source Control settings page.',
|
||||
);
|
||||
|
|
|
@ -8,7 +8,6 @@ import {
|
|||
} from './constants';
|
||||
import * as Db from '@/Db';
|
||||
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import { writeFile as fsWriteFile, rm as fsRm } from 'fs/promises';
|
||||
import { rmSync } from 'fs';
|
||||
import { Credentials, InstanceSettings } from 'n8n-core';
|
||||
|
@ -27,6 +26,7 @@ import { In } from 'typeorm';
|
|||
import type { SourceControlledFile } from './types/sourceControlledFile';
|
||||
import { VariablesService } from '../variables/variables.service';
|
||||
import { TagRepository } from '@/databases/repositories';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Service()
|
||||
export class SourceControlExportService {
|
||||
|
@ -37,6 +37,7 @@ export class SourceControlExportService {
|
|||
private credentialExportFolder: string;
|
||||
|
||||
constructor(
|
||||
private readonly logger: Logger,
|
||||
private readonly variablesService: VariablesService,
|
||||
private readonly tagRepository: TagRepository,
|
||||
instanceSettings: InstanceSettings,
|
||||
|
@ -61,7 +62,7 @@ export class SourceControlExportService {
|
|||
try {
|
||||
await fsRm(this.gitFolder, { recursive: true });
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to delete work folder: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to delete work folder: ${(error as Error).message}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -69,7 +70,7 @@ export class SourceControlExportService {
|
|||
try {
|
||||
filesToBeDeleted.forEach((e) => rmSync(e));
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to delete workflows from work folder: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to delete workflows from work folder: ${(error as Error).message}`);
|
||||
}
|
||||
return filesToBeDeleted;
|
||||
}
|
||||
|
@ -91,7 +92,7 @@ export class SourceControlExportService {
|
|||
versionId: e.versionId,
|
||||
owner: owners[e.id],
|
||||
};
|
||||
LoggerProxy.debug(`Writing workflow ${e.id} to ${fileName}`);
|
||||
this.logger.debug(`Writing workflow ${e.id} to ${fileName}`);
|
||||
return fsWriteFile(fileName, JSON.stringify(sanitizedWorkflow, null, 2));
|
||||
}),
|
||||
);
|
||||
|
@ -224,7 +225,7 @@ export class SourceControlExportService {
|
|||
continue;
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to sanitize credential data: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to sanitize credential data: ${(error as Error).message}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
@ -262,7 +263,7 @@ export class SourceControlExportService {
|
|||
data: sanitizedData,
|
||||
nodesAccess: sharedCredential.credentials.nodesAccess,
|
||||
};
|
||||
LoggerProxy.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`);
|
||||
this.logger.debug(`Writing credential ${sharedCredential.credentials.id} to ${fileName}`);
|
||||
return fsWriteFile(fileName, JSON.stringify(sanitizedCredential, null, 2));
|
||||
}),
|
||||
);
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import { Service } from 'typedi';
|
||||
import { execSync } from 'child_process';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import path from 'path';
|
||||
import type {
|
||||
CommitResult,
|
||||
|
@ -20,8 +19,9 @@ import {
|
|||
SOURCE_CONTROL_ORIGIN,
|
||||
} from './constants';
|
||||
import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee';
|
||||
import type { User } from '../../databases/entities/User';
|
||||
import type { User } from '@db/entities/User';
|
||||
import { getInstanceOwner } from '../../UserManagement/UserManagementHelper';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Service()
|
||||
export class SourceControlGitService {
|
||||
|
@ -29,17 +29,19 @@ export class SourceControlGitService {
|
|||
|
||||
private gitOptions: Partial<SimpleGitOptions> = {};
|
||||
|
||||
constructor(private readonly logger: Logger) {}
|
||||
|
||||
/**
|
||||
* Run pre-checks before initialising git
|
||||
* Checks for existence of required binaries (git and ssh)
|
||||
*/
|
||||
private preInitCheck(): boolean {
|
||||
LoggerProxy.debug('GitService.preCheck');
|
||||
this.logger.debug('GitService.preCheck');
|
||||
try {
|
||||
const gitResult = execSync('git --version', {
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
});
|
||||
LoggerProxy.debug(`Git binary found: ${gitResult.toString()}`);
|
||||
this.logger.debug(`Git binary found: ${gitResult.toString()}`);
|
||||
} catch (error) {
|
||||
throw new Error(`Git binary not found: ${(error as Error).message}`);
|
||||
}
|
||||
|
@ -47,7 +49,7 @@ export class SourceControlGitService {
|
|||
const sshResult = execSync('ssh -V', {
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
});
|
||||
LoggerProxy.debug(`SSH binary found: ${sshResult.toString()}`);
|
||||
this.logger.debug(`SSH binary found: ${sshResult.toString()}`);
|
||||
} catch (error) {
|
||||
throw new Error(`SSH binary not found: ${(error as Error).message}`);
|
||||
}
|
||||
|
@ -66,13 +68,13 @@ export class SourceControlGitService {
|
|||
sshKeyName,
|
||||
sshFolder,
|
||||
} = options;
|
||||
LoggerProxy.debug('GitService.init');
|
||||
this.logger.debug('GitService.init');
|
||||
if (this.git !== null) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.preInitCheck();
|
||||
LoggerProxy.debug('Git pre-check passed');
|
||||
this.logger.debug('Git pre-check passed');
|
||||
|
||||
sourceControlFoldersExistCheck([gitFolder, sshFolder]);
|
||||
|
||||
|
@ -135,13 +137,13 @@ export class SourceControlGitService {
|
|||
(e) => e.name === SOURCE_CONTROL_ORIGIN && e.refs.push === remote,
|
||||
);
|
||||
if (foundRemote) {
|
||||
LoggerProxy.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`);
|
||||
this.logger.debug(`Git remote found: ${foundRemote.name}: ${foundRemote.refs.push}`);
|
||||
return true;
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`Git is not initialized ${(error as Error).message}`);
|
||||
}
|
||||
LoggerProxy.debug(`Git remote not found: ${remote}`);
|
||||
this.logger.debug(`Git remote not found: ${remote}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -159,14 +161,14 @@ export class SourceControlGitService {
|
|||
try {
|
||||
await this.git.init();
|
||||
} catch (error) {
|
||||
LoggerProxy.debug(`Git init: ${(error as Error).message}`);
|
||||
this.logger.debug(`Git init: ${(error as Error).message}`);
|
||||
}
|
||||
}
|
||||
try {
|
||||
await this.git.addRemote(SOURCE_CONTROL_ORIGIN, sourceControlPreferences.repositoryUrl);
|
||||
} catch (error) {
|
||||
if ((error as Error).message.includes('remote origin already exists')) {
|
||||
LoggerProxy.debug(`Git remote already exists: ${(error as Error).message}`);
|
||||
this.logger.debug(`Git remote already exists: ${(error as Error).message}`);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
|
@ -182,7 +184,7 @@ export class SourceControlGitService {
|
|||
await this.git.raw(['branch', '-M', sourceControlPreferences.branchName]);
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.debug(`Git init: ${(error as Error).message}`);
|
||||
this.logger.debug(`Git init: ${(error as Error).message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -305,7 +307,7 @@ export class SourceControlGitService {
|
|||
try {
|
||||
await this.git.rm(Array.from(deletedFiles));
|
||||
} catch (error) {
|
||||
LoggerProxy.debug(`Git rm: ${(error as Error).message}`);
|
||||
this.logger.debug(`Git rm: ${(error as Error).message}`);
|
||||
}
|
||||
}
|
||||
return this.git.add(Array.from(files));
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
import Container from 'typedi';
|
||||
import { Container } from 'typedi';
|
||||
import { License } from '@/License';
|
||||
import { generateKeyPairSync } from 'crypto';
|
||||
import type { KeyPair } from './types/keyPair';
|
||||
import { constants as fsConstants, mkdirSync, accessSync } from 'fs';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import {
|
||||
SOURCE_CONTROL_GIT_KEY_COMMENT,
|
||||
SOURCE_CONTROL_TAGS_EXPORT_FILE,
|
||||
|
@ -12,6 +11,7 @@ import {
|
|||
import type { SourceControlledFile } from './types/sourceControlledFile';
|
||||
import path from 'path';
|
||||
import type { KeyPairType } from './types/keyPairType';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export function stringContainsExpression(testString: string): boolean {
|
||||
return /^=.*\{\{.*\}\}/.test(testString);
|
||||
|
@ -51,7 +51,7 @@ export function sourceControlFoldersExistCheck(
|
|||
try {
|
||||
mkdirSync(folder, { recursive: true });
|
||||
} catch (error) {
|
||||
LoggerProxy.error((error as Error).message);
|
||||
Container.get(Logger).error((error as Error).message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import Container, { Service } from 'typedi';
|
||||
import { Container, Service } from 'typedi';
|
||||
import path from 'path';
|
||||
import {
|
||||
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
|
||||
|
@ -9,7 +9,7 @@ import {
|
|||
} from './constants';
|
||||
import * as Db from '@/Db';
|
||||
import glob from 'fast-glob';
|
||||
import { LoggerProxy, jsonParse } from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
import { readFile as fsReadFile } from 'fs/promises';
|
||||
import { Credentials, InstanceSettings } from 'n8n-core';
|
||||
import type { IWorkflowToImport } from '@/Interfaces';
|
||||
|
@ -28,6 +28,7 @@ import { RoleService } from '@/services/role.service';
|
|||
import { VariablesService } from '../variables/variables.service';
|
||||
import { TagRepository } from '@/databases/repositories';
|
||||
import { UM_FIX_INSTRUCTION } from '@/constants';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Service()
|
||||
export class SourceControlImportService {
|
||||
|
@ -38,6 +39,7 @@ export class SourceControlImportService {
|
|||
private credentialExportFolder: string;
|
||||
|
||||
constructor(
|
||||
private readonly logger: Logger,
|
||||
private readonly variablesService: VariablesService,
|
||||
private readonly activeWorkflowRunner: ActiveWorkflowRunner,
|
||||
private readonly tagRepository: TagRepository,
|
||||
|
@ -88,7 +90,7 @@ export class SourceControlImportService {
|
|||
});
|
||||
const remoteWorkflowFilesParsed = await Promise.all(
|
||||
remoteWorkflowFiles.map(async (file) => {
|
||||
LoggerProxy.debug(`Parsing workflow file ${file}`);
|
||||
this.logger.debug(`Parsing workflow file ${file}`);
|
||||
const remote = jsonParse<IWorkflowToImport>(await fsReadFile(file, { encoding: 'utf8' }));
|
||||
if (!remote?.id) {
|
||||
return undefined;
|
||||
|
@ -130,7 +132,7 @@ export class SourceControlImportService {
|
|||
});
|
||||
const remoteCredentialFilesParsed = await Promise.all(
|
||||
remoteCredentialFiles.map(async (file) => {
|
||||
LoggerProxy.debug(`Parsing credential file ${file}`);
|
||||
this.logger.debug(`Parsing credential file ${file}`);
|
||||
const remote = jsonParse<ExportableCredential>(
|
||||
await fsReadFile(file, { encoding: 'utf8' }),
|
||||
);
|
||||
|
@ -169,7 +171,7 @@ export class SourceControlImportService {
|
|||
absolute: true,
|
||||
});
|
||||
if (variablesFile.length > 0) {
|
||||
LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`);
|
||||
this.logger.debug(`Importing variables from file ${variablesFile[0]}`);
|
||||
return jsonParse<Variables[]>(await fsReadFile(variablesFile[0], { encoding: 'utf8' }), {
|
||||
fallbackValue: [],
|
||||
});
|
||||
|
@ -190,7 +192,7 @@ export class SourceControlImportService {
|
|||
absolute: true,
|
||||
});
|
||||
if (tagsFile.length > 0) {
|
||||
LoggerProxy.debug(`Importing tags from file ${tagsFile[0]}`);
|
||||
this.logger.debug(`Importing tags from file ${tagsFile[0]}`);
|
||||
const mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
|
||||
await fsReadFile(tagsFile[0], { encoding: 'utf8' }),
|
||||
{ fallbackValue: { tags: [], mappings: [] } },
|
||||
|
@ -232,7 +234,7 @@ export class SourceControlImportService {
|
|||
const cachedOwnerIds = new Map<string, string>();
|
||||
const importWorkflowsResult = await Promise.all(
|
||||
candidates.map(async (candidate) => {
|
||||
LoggerProxy.debug(`Parsing workflow file ${candidate.file}`);
|
||||
this.logger.debug(`Parsing workflow file ${candidate.file}`);
|
||||
const importedWorkflow = jsonParse<IWorkflowToImport & { owner: string }>(
|
||||
await fsReadFile(candidate.file, { encoding: 'utf8' }),
|
||||
);
|
||||
|
@ -241,7 +243,7 @@ export class SourceControlImportService {
|
|||
}
|
||||
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
|
||||
importedWorkflow.active = existingWorkflow?.active ?? false;
|
||||
LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
|
||||
this.logger.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
|
||||
const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']);
|
||||
if (upsertResult?.identifiers?.length !== 1) {
|
||||
throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`);
|
||||
|
@ -299,14 +301,14 @@ export class SourceControlImportService {
|
|||
if (existingWorkflow?.active) {
|
||||
try {
|
||||
// remove active pre-import workflow
|
||||
LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`);
|
||||
this.logger.debug(`Deactivating workflow id ${existingWorkflow.id}`);
|
||||
await workflowRunner.remove(existingWorkflow.id);
|
||||
// try activating the imported workflow
|
||||
LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`);
|
||||
this.logger.debug(`Reactivating workflow id ${existingWorkflow.id}`);
|
||||
await workflowRunner.add(existingWorkflow.id, 'activate');
|
||||
// update the versionId of the workflow to match the imported workflow
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
|
||||
this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
|
||||
} finally {
|
||||
await Db.collections.Workflow.update(
|
||||
{ id: existingWorkflow.id },
|
||||
|
@ -347,7 +349,7 @@ export class SourceControlImportService {
|
|||
let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
|
||||
importCredentialsResult = await Promise.all(
|
||||
candidates.map(async (candidate) => {
|
||||
LoggerProxy.debug(`Importing credentials file ${candidate.file}`);
|
||||
this.logger.debug(`Importing credentials file ${candidate.file}`);
|
||||
const credential = jsonParse<ExportableCredential>(
|
||||
await fsReadFile(candidate.file, { encoding: 'utf8' }),
|
||||
);
|
||||
|
@ -367,7 +369,7 @@ export class SourceControlImportService {
|
|||
}
|
||||
newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || [];
|
||||
|
||||
LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`);
|
||||
this.logger.debug(`Updating credential id ${newCredentialObject.id as string}`);
|
||||
await Db.collections.Credentials.upsert(newCredentialObject, ['id']);
|
||||
|
||||
if (!sharedOwner) {
|
||||
|
@ -395,13 +397,13 @@ export class SourceControlImportService {
|
|||
public async importTagsFromWorkFolder(candidate: SourceControlledFile) {
|
||||
let mappedTags;
|
||||
try {
|
||||
LoggerProxy.debug(`Importing tags from file ${candidate.file}`);
|
||||
this.logger.debug(`Importing tags from file ${candidate.file}`);
|
||||
mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
|
||||
await fsReadFile(candidate.file, { encoding: 'utf8' }),
|
||||
{ fallbackValue: { tags: [], mappings: [] } },
|
||||
);
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error);
|
||||
this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -462,13 +464,13 @@ export class SourceControlImportService {
|
|||
const result: { imported: string[] } = { imported: [] };
|
||||
let importedVariables;
|
||||
try {
|
||||
LoggerProxy.debug(`Importing variables from file ${candidate.file}`);
|
||||
this.logger.debug(`Importing variables from file ${candidate.file}`);
|
||||
importedVariables = jsonParse<Array<Partial<Variables>>>(
|
||||
await fsReadFile(candidate.file, { encoding: 'utf8' }),
|
||||
{ fallbackValue: [] },
|
||||
);
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error);
|
||||
this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
|
||||
return;
|
||||
}
|
||||
const overriddenKeys = Object.keys(valueOverrides ?? {});
|
||||
|
@ -490,12 +492,12 @@ export class SourceControlImportService {
|
|||
await Db.collections.Variables.upsert({ ...variable }, ['id']);
|
||||
} catch (errorUpsert) {
|
||||
if (isUniqueConstraintError(errorUpsert as Error)) {
|
||||
LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`);
|
||||
this.logger.debug(`Variable ${variable.key} already exists, updating instead`);
|
||||
try {
|
||||
await Db.collections.Variables.update({ key: variable.key }, { ...variable });
|
||||
} catch (errorUpdate) {
|
||||
LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`);
|
||||
LoggerProxy.debug((errorUpdate as Error).message);
|
||||
this.logger.debug(`Failed to update variable ${variable.key}, skipping`);
|
||||
this.logger.debug((errorUpdate as Error).message);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
|
|
|
@ -10,7 +10,7 @@ import {
|
|||
sourceControlFoldersExistCheck,
|
||||
} from './sourceControlHelper.ee';
|
||||
import { InstanceSettings } from 'n8n-core';
|
||||
import { LoggerProxy, jsonParse } from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
import * as Db from '@/Db';
|
||||
import {
|
||||
SOURCE_CONTROL_SSH_FOLDER,
|
||||
|
@ -21,6 +21,7 @@ import {
|
|||
import path from 'path';
|
||||
import type { KeyPairType } from './types/keyPairType';
|
||||
import config from '@/config';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
@Service()
|
||||
export class SourceControlPreferencesService {
|
||||
|
@ -32,7 +33,10 @@ export class SourceControlPreferencesService {
|
|||
|
||||
readonly gitFolder: string;
|
||||
|
||||
constructor(instanceSettings: InstanceSettings) {
|
||||
constructor(
|
||||
instanceSettings: InstanceSettings,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
this.sshFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_SSH_FOLDER);
|
||||
this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
|
||||
this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME);
|
||||
|
@ -66,7 +70,7 @@ export class SourceControlPreferencesService {
|
|||
try {
|
||||
return fsReadFileSync(this.sshKeyName + '.pub', { encoding: 'utf8' });
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to read public key: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to read public key: ${(error as Error).message}`);
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
@ -79,7 +83,7 @@ export class SourceControlPreferencesService {
|
|||
try {
|
||||
await fsRm(this.sshFolder, { recursive: true });
|
||||
} catch (error) {
|
||||
LoggerProxy.error(`Failed to delete ssh folder: ${(error as Error).message}`);
|
||||
this.logger.error(`Failed to delete ssh folder: ${(error as Error).message}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -160,7 +164,7 @@ export class SourceControlPreferencesService {
|
|||
const keyPairType =
|
||||
preferences.keyGeneratorType ??
|
||||
(config.get('sourceControl.defaultKeyPairType') as KeyPairType);
|
||||
LoggerProxy.debug(`No key pair files found, generating new pair using type: ${keyPairType}`);
|
||||
this.logger.debug(`No key pair files found, generating new pair using type: ${keyPairType}`);
|
||||
await this.generateAndSaveKeyPair(keyPairType);
|
||||
}
|
||||
this.sourceControlPreferences = preferences;
|
||||
|
@ -194,7 +198,7 @@ export class SourceControlPreferencesService {
|
|||
return preferences;
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.warn(
|
||||
this.logger.warn(
|
||||
`Could not parse Source Control settings from database: ${(error as Error).message}`,
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import express from 'express';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import { Container } from 'typedi';
|
||||
|
||||
import * as ResponseHelper from '@/ResponseHelper';
|
||||
import type { VariablesRequest } from '@/requests';
|
||||
|
@ -9,14 +9,11 @@ import {
|
|||
VariablesValidationError,
|
||||
} from './variables.service.ee';
|
||||
import { isVariablesEnabled } from './enviromentHelpers';
|
||||
import Container from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
export const EEVariablesController = express.Router();
|
||||
|
||||
/**
|
||||
* Initialize Logger if needed
|
||||
*/
|
||||
EEVariablesController.use((req, res, next) => {
|
||||
if (!isVariablesEnabled()) {
|
||||
next('router');
|
||||
|
@ -30,9 +27,12 @@ EEVariablesController.post(
|
|||
'/',
|
||||
ResponseHelper.send(async (req: VariablesRequest.Create) => {
|
||||
if (req.user.globalRole.name !== 'owner') {
|
||||
LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', {
|
||||
userId: req.user.id,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to update a variable blocked due to lack of permissions',
|
||||
{
|
||||
userId: req.user.id,
|
||||
},
|
||||
);
|
||||
throw new ResponseHelper.AuthError('Unauthorized');
|
||||
}
|
||||
const variable = req.body;
|
||||
|
@ -55,10 +55,13 @@ EEVariablesController.patch(
|
|||
ResponseHelper.send(async (req: VariablesRequest.Update) => {
|
||||
const id = req.params.id;
|
||||
if (req.user.globalRole.name !== 'owner') {
|
||||
LoggerProxy.info('Attempt to update a variable blocked due to lack of permissions', {
|
||||
id,
|
||||
userId: req.user.id,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to update a variable blocked due to lack of permissions',
|
||||
{
|
||||
id,
|
||||
userId: req.user.id,
|
||||
},
|
||||
);
|
||||
throw new ResponseHelper.AuthError('Unauthorized');
|
||||
}
|
||||
const variable = req.body;
|
||||
|
|
|
@ -1,29 +1,15 @@
|
|||
import express from 'express';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import { Container } from 'typedi';
|
||||
|
||||
import { getLogger } from '@/Logger';
|
||||
import * as ResponseHelper from '@/ResponseHelper';
|
||||
import type { VariablesRequest } from '@/requests';
|
||||
import { VariablesService } from './variables.service';
|
||||
import { EEVariablesController } from './variables.controller.ee';
|
||||
import Container from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export const variablesController = express.Router();
|
||||
|
||||
variablesController.use('/', EEVariablesController);
|
||||
|
||||
/**
|
||||
* Initialize Logger if needed
|
||||
*/
|
||||
variablesController.use((req, res, next) => {
|
||||
try {
|
||||
LoggerProxy.getInstance();
|
||||
} catch (error) {
|
||||
LoggerProxy.init(getLogger());
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
variablesController.use(EEVariablesController);
|
||||
|
||||
variablesController.get(
|
||||
|
@ -64,10 +50,13 @@ variablesController.delete(
|
|||
ResponseHelper.send(async (req: VariablesRequest.Delete) => {
|
||||
const id = req.params.id;
|
||||
if (req.user.globalRole.name !== 'owner') {
|
||||
LoggerProxy.info('Attempt to delete a variable blocked due to lack of permissions', {
|
||||
id,
|
||||
userId: req.user.id,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to delete a variable blocked due to lack of permissions',
|
||||
{
|
||||
id,
|
||||
userId: req.user.id,
|
||||
},
|
||||
);
|
||||
throw new ResponseHelper.AuthError('Unauthorized');
|
||||
}
|
||||
await Container.get(VariablesService).delete(id);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { LoggerProxy, jsonParse } from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
import type { MessageEventBusDestinationOptions } from 'n8n-workflow';
|
||||
import type { DeleteResult } from 'typeorm';
|
||||
import { In } from 'typeorm';
|
||||
|
@ -28,11 +28,12 @@ import {
|
|||
} from '../EventMessageClasses/EventMessageGeneric';
|
||||
import { recoverExecutionDataFromEventLogMessages } from './recoverEvents';
|
||||
import { METRICS_EVENT_NAME } from '../MessageEventBusDestination/Helpers.ee';
|
||||
import Container, { Service } from 'typedi';
|
||||
import { Container, Service } from 'typedi';
|
||||
import { ExecutionRepository, WorkflowRepository } from '@/databases/repositories';
|
||||
import type { AbstractEventMessageOptions } from '../EventMessageClasses/AbstractEventMessageOptions';
|
||||
import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers';
|
||||
import { OrchestrationMainService } from '@/services/orchestration/main/orchestration.main.service';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export type EventMessageReturnMode = 'sent' | 'unsent' | 'all' | 'unfinished';
|
||||
|
||||
|
@ -48,8 +49,6 @@ export interface MessageEventBusInitializeOptions {
|
|||
|
||||
@Service()
|
||||
export class MessageEventBus extends EventEmitter {
|
||||
private static instance: MessageEventBus;
|
||||
|
||||
isInitialized: boolean;
|
||||
|
||||
logWriter: MessageEventBusLogWriter;
|
||||
|
@ -60,18 +59,11 @@ export class MessageEventBus extends EventEmitter {
|
|||
|
||||
private pushIntervalTimer: NodeJS.Timer;
|
||||
|
||||
constructor() {
|
||||
constructor(private readonly logger: Logger) {
|
||||
super();
|
||||
this.isInitialized = false;
|
||||
}
|
||||
|
||||
static getInstance(): MessageEventBus {
|
||||
if (!MessageEventBus.instance) {
|
||||
MessageEventBus.instance = new MessageEventBus();
|
||||
}
|
||||
return MessageEventBus.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Needs to be called once at startup to set the event bus instance up. Will launch the event log writer and,
|
||||
* if configured to do so, the previously stored event destinations.
|
||||
|
@ -85,7 +77,7 @@ export class MessageEventBus extends EventEmitter {
|
|||
return;
|
||||
}
|
||||
|
||||
LoggerProxy.debug('Initializing event bus...');
|
||||
this.logger.debug('Initializing event bus...');
|
||||
|
||||
const savedEventDestinations = await Db.collections.EventDestinations.find({});
|
||||
if (savedEventDestinations.length > 0) {
|
||||
|
@ -97,12 +89,12 @@ export class MessageEventBus extends EventEmitter {
|
|||
}
|
||||
} catch (error) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
if (error.message) LoggerProxy.debug(error.message as string);
|
||||
if (error.message) this.logger.debug(error.message as string);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LoggerProxy.debug('Initializing event writer');
|
||||
this.logger.debug('Initializing event writer');
|
||||
if (options?.workerId) {
|
||||
// only add 'worker' to log file name since the ID changes on every start and we
|
||||
// would not be able to recover the log files from the previous run not knowing it
|
||||
|
@ -115,19 +107,19 @@ export class MessageEventBus extends EventEmitter {
|
|||
}
|
||||
|
||||
if (!this.logWriter) {
|
||||
LoggerProxy.warn('Could not initialize event writer');
|
||||
this.logger.warn('Could not initialize event writer');
|
||||
}
|
||||
|
||||
if (options?.skipRecoveryPass) {
|
||||
LoggerProxy.debug('Skipping unsent event check');
|
||||
this.logger.debug('Skipping unsent event check');
|
||||
} else {
|
||||
// unsent event check:
|
||||
// - find unsent messages in current event log(s)
|
||||
// - cycle event logs and start the logging to a fresh file
|
||||
// - retry sending events
|
||||
LoggerProxy.debug('Checking for unsent event messages');
|
||||
this.logger.debug('Checking for unsent event messages');
|
||||
const unsentAndUnfinished = await this.getUnsentAndUnfinishedExecutions();
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
`Start logging into ${this.logWriter?.getLogFileName() ?? 'unknown filename'} `,
|
||||
);
|
||||
this.logWriter?.startLogging();
|
||||
|
@ -152,16 +144,16 @@ export class MessageEventBus extends EventEmitter {
|
|||
}
|
||||
|
||||
if (unfinishedExecutionIds.length > 0) {
|
||||
LoggerProxy.warn(`Found unfinished executions: ${unfinishedExecutionIds.join(', ')}`);
|
||||
LoggerProxy.info('This could be due to a crash of an active workflow or a restart of n8n.');
|
||||
this.logger.warn(`Found unfinished executions: ${unfinishedExecutionIds.join(', ')}`);
|
||||
this.logger.info('This could be due to a crash of an active workflow or a restart of n8n.');
|
||||
const activeWorkflows = await Container.get(WorkflowRepository).find({
|
||||
where: { active: true },
|
||||
select: ['id', 'name'],
|
||||
});
|
||||
if (activeWorkflows.length > 0) {
|
||||
LoggerProxy.info('Currently active workflows:');
|
||||
this.logger.info('Currently active workflows:');
|
||||
for (const workflowData of activeWorkflows) {
|
||||
LoggerProxy.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
|
||||
this.logger.info(` - ${workflowData.name} (ID: ${workflowData.id})`);
|
||||
}
|
||||
}
|
||||
const recoveryAlreadyAttempted = this.logWriter?.isRecoveryProcessRunning();
|
||||
|
@ -171,14 +163,14 @@ export class MessageEventBus extends EventEmitter {
|
|||
// a possible reason would be that recreating the workflow data itself caused e.g an OOM error
|
||||
// in that case, we do not want to retry the recovery process, but rather mark the executions as crashed
|
||||
if (recoveryAlreadyAttempted)
|
||||
LoggerProxy.warn('Skipped recovery process since it previously failed.');
|
||||
this.logger.warn('Skipped recovery process since it previously failed.');
|
||||
} else {
|
||||
// start actual recovery process and write recovery process flag file
|
||||
this.logWriter?.startRecoveryProcess();
|
||||
for (const executionId of unfinishedExecutionIds) {
|
||||
LoggerProxy.warn(`Attempting to recover execution ${executionId}`);
|
||||
this.logger.warn(`Attempting to recover execution ${executionId}`);
|
||||
if (!unsentAndUnfinished.unfinishedExecutions[executionId]?.length) {
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
`No event messages found, marking execution ${executionId} as 'crashed'`,
|
||||
);
|
||||
await Container.get(ExecutionRepository).markAsCrashed([executionId]);
|
||||
|
@ -205,7 +197,7 @@ export class MessageEventBus extends EventEmitter {
|
|||
}, config.getEnv('eventBus.checkUnsentInterval'));
|
||||
}
|
||||
|
||||
LoggerProxy.debug('MessageEventBus initialized');
|
||||
this.logger.debug('MessageEventBus initialized');
|
||||
this.isInitialized = true;
|
||||
}
|
||||
|
||||
|
@ -263,25 +255,25 @@ export class MessageEventBus extends EventEmitter {
|
|||
private async trySendingUnsent(msgs?: EventMessageTypes[]) {
|
||||
const unsentMessages = msgs ?? (await this.getEventsUnsent());
|
||||
if (unsentMessages.length > 0) {
|
||||
LoggerProxy.debug(`Found unsent event messages: ${unsentMessages.length}`);
|
||||
this.logger.debug(`Found unsent event messages: ${unsentMessages.length}`);
|
||||
for (const unsentMsg of unsentMessages) {
|
||||
LoggerProxy.debug(`Retrying: ${unsentMsg.id} ${unsentMsg.__type}`);
|
||||
this.logger.debug(`Retrying: ${unsentMsg.id} ${unsentMsg.__type}`);
|
||||
await this.emitMessage(unsentMsg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async close() {
|
||||
LoggerProxy.debug('Shutting down event writer...');
|
||||
this.logger.debug('Shutting down event writer...');
|
||||
await this.logWriter?.close();
|
||||
for (const destinationName of Object.keys(this.destinations)) {
|
||||
LoggerProxy.debug(
|
||||
this.logger.debug(
|
||||
`Shutting down event destination ${this.destinations[destinationName].getId()}...`,
|
||||
);
|
||||
await this.destinations[destinationName].close();
|
||||
}
|
||||
this.isInitialized = false;
|
||||
LoggerProxy.debug('EventBus shut down.');
|
||||
this.logger.debug('EventBus shut down.');
|
||||
}
|
||||
|
||||
async restart() {
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
import { v4 as uuid } from 'uuid';
|
||||
import { Container } from 'typedi';
|
||||
import type { DeleteResult, InsertResult } from 'typeorm';
|
||||
import type { INodeCredentials } from 'n8n-workflow';
|
||||
import {
|
||||
LoggerProxy,
|
||||
MessageEventBusDestinationTypeNames,
|
||||
MessageEventBusDestinationOptions,
|
||||
} from 'n8n-workflow';
|
||||
import * as Db from '@/Db';
|
||||
import { Logger } from '@/Logger';
|
||||
import type { AbstractEventMessage } from '../EventMessageClasses/AbstractEventMessage';
|
||||
import type { EventMessageTypes } from '../EventMessageClasses';
|
||||
import type { DeleteResult, InsertResult } from 'typeorm';
|
||||
import type { EventMessageConfirmSource } from '../EventMessageClasses/EventMessageConfirm';
|
||||
import { MessageEventBus } from '../MessageEventBus/MessageEventBus';
|
||||
import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus';
|
||||
|
@ -20,6 +21,8 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
|
|||
|
||||
readonly eventBusInstance: MessageEventBus;
|
||||
|
||||
protected readonly logger: Logger;
|
||||
|
||||
__type: MessageEventBusDestinationTypeNames;
|
||||
|
||||
label: string;
|
||||
|
@ -33,6 +36,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
|
|||
anonymizeAuditMessages: boolean;
|
||||
|
||||
constructor(eventBusInstance: MessageEventBus, options: MessageEventBusDestinationOptions) {
|
||||
this.logger = Container.get(Logger);
|
||||
this.eventBusInstance = eventBusInstance;
|
||||
this.id = !options.id || options.id.length !== 36 ? uuid() : options.id;
|
||||
this.__type = options.__type ?? MessageEventBusDestinationTypeNames.abstract;
|
||||
|
@ -41,7 +45,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
|
|||
this.subscribedEvents = options.subscribedEvents ?? [];
|
||||
this.anonymizeAuditMessages = options.anonymizeAuditMessages ?? false;
|
||||
if (options.credentials) this.credentials = options.credentials;
|
||||
LoggerProxy.debug(`${this.__type}(${this.id}) event destination constructed`);
|
||||
this.logger.debug(`${this.__type}(${this.id}) event destination constructed`);
|
||||
}
|
||||
|
||||
startListening() {
|
||||
|
@ -55,7 +59,7 @@ export abstract class MessageEventBusDestination implements MessageEventBusDesti
|
|||
await this.receiveFromEventBus({ msg, confirmCallback });
|
||||
},
|
||||
);
|
||||
LoggerProxy.debug(`${this.id} listener started`);
|
||||
this.logger.debug(`${this.id} listener started`);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
import { MessageEventBusDestinationTypeNames, LoggerProxy } from 'n8n-workflow';
|
||||
import { MessageEventBusDestinationTypeNames } from 'n8n-workflow';
|
||||
import type { EventDestinations } from '@/databases/entities/EventDestinations';
|
||||
import type { MessageEventBus } from '../MessageEventBus/MessageEventBus';
|
||||
import type { MessageEventBusDestination } from './MessageEventBusDestination.ee';
|
||||
import { MessageEventBusDestinationSentry } from './MessageEventBusDestinationSentry.ee';
|
||||
import { MessageEventBusDestinationSyslog } from './MessageEventBusDestinationSyslog.ee';
|
||||
import { MessageEventBusDestinationWebhook } from './MessageEventBusDestinationWebhook.ee';
|
||||
import { Container } from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export function messageEventBusDestinationFromDb(
|
||||
eventBusInstance: MessageEventBus,
|
||||
|
@ -20,7 +22,7 @@ export function messageEventBusDestinationFromDb(
|
|||
case MessageEventBusDestinationTypeNames.webhook:
|
||||
return MessageEventBusDestinationWebhook.deserialize(eventBusInstance, destinationData);
|
||||
default:
|
||||
LoggerProxy.debug('MessageEventBusDestination __type unknown');
|
||||
Container.get(Logger).debug('MessageEventBusDestination __type unknown');
|
||||
}
|
||||
}
|
||||
return null;
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
import { MessageEventBusDestination } from './MessageEventBusDestination.ee';
|
||||
import * as Sentry from '@sentry/node';
|
||||
import {
|
||||
LoggerProxy,
|
||||
MessageEventBusDestinationTypeNames,
|
||||
MessageEventBusDestinationSentryOptions,
|
||||
} from 'n8n-workflow';
|
||||
|
@ -90,7 +89,7 @@ export class MessageEventBusDestinationSentry
|
|||
sendResult = true;
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.message) LoggerProxy.debug(error.message as string);
|
||||
if (error.message) this.logger.debug(error.message as string);
|
||||
}
|
||||
return sendResult;
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
import syslog from 'syslog-client';
|
||||
|
||||
import type { MessageEventBusDestinationOptions } from 'n8n-workflow';
|
||||
import {
|
||||
LoggerProxy,
|
||||
MessageEventBusDestinationTypeNames,
|
||||
MessageEventBusDestinationSyslogOptions,
|
||||
} from 'n8n-workflow';
|
||||
|
@ -11,7 +11,6 @@ import { isLogStreamingEnabled } from '../MessageEventBus/MessageEventBusHelper'
|
|||
import { eventMessageGenericDestinationTestEvent } from '../EventMessageClasses/EventMessageGeneric';
|
||||
import { MessageEventBus } from '../MessageEventBus/MessageEventBus';
|
||||
import type { MessageWithCallback } from '../MessageEventBus/MessageEventBus';
|
||||
|
||||
export const isMessageEventBusDestinationSyslogOptions = (
|
||||
candidate: unknown,
|
||||
): candidate is MessageEventBusDestinationSyslogOptions => {
|
||||
|
@ -63,7 +62,7 @@ export class MessageEventBusDestinationSyslog
|
|||
? syslog.Transport.Tcp
|
||||
: syslog.Transport.Udp,
|
||||
});
|
||||
LoggerProxy.debug(`MessageEventBusDestinationSyslog with id ${this.getId()} initialized`);
|
||||
this.logger.debug(`MessageEventBusDestinationSyslog with id ${this.getId()} initialized`);
|
||||
this.client.on('error', function (error) {
|
||||
console.error(error);
|
||||
});
|
||||
|
@ -93,7 +92,7 @@ export class MessageEventBusDestinationSyslog
|
|||
},
|
||||
async (error) => {
|
||||
if (error?.message) {
|
||||
LoggerProxy.debug(error.message);
|
||||
this.logger.debug(error.message);
|
||||
} else {
|
||||
// eventBus.confirmSent(msg, { id: this.id, name: this.label });
|
||||
confirmCallback(msg, { id: this.id, name: this.label });
|
||||
|
@ -102,7 +101,7 @@ export class MessageEventBusDestinationSyslog
|
|||
},
|
||||
);
|
||||
} catch (error) {
|
||||
if (error.message) LoggerProxy.debug(error.message as string);
|
||||
if (error.message) this.logger.debug(error.message as string);
|
||||
}
|
||||
if (msg.eventName === eventMessageGenericDestinationTestEvent) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
|
|
@ -7,7 +7,6 @@ import axios from 'axios';
|
|||
import type { AxiosRequestConfig, Method } from 'axios';
|
||||
import {
|
||||
jsonParse,
|
||||
LoggerProxy,
|
||||
MessageEventBusDestinationTypeNames,
|
||||
MessageEventBusDestinationWebhookOptions,
|
||||
} from 'n8n-workflow';
|
||||
|
@ -102,7 +101,7 @@ export class MessageEventBusDestinationWebhook
|
|||
if (options.sendPayload) this.sendPayload = options.sendPayload;
|
||||
if (options.options) this.options = options.options;
|
||||
|
||||
LoggerProxy.debug(`MessageEventBusDestinationWebhook with id ${this.getId()} initialized`);
|
||||
this.logger.debug(`MessageEventBusDestinationWebhook with id ${this.getId()} initialized`);
|
||||
}
|
||||
|
||||
async matchDecryptedCredentialType(credentialType: string) {
|
||||
|
@ -359,7 +358,7 @@ export class MessageEventBusDestinationWebhook
|
|||
}
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.warn(
|
||||
this.logger.warn(
|
||||
`Webhook destination ${this.label} failed to send message to: ${this.url} - ${
|
||||
(error as Error).message
|
||||
}`,
|
||||
|
|
|
@ -6,7 +6,7 @@ import path, { parse } from 'path';
|
|||
import { Worker } from 'worker_threads';
|
||||
import { createReadStream, existsSync, rmSync } from 'fs';
|
||||
import readline from 'readline';
|
||||
import { jsonParse, LoggerProxy } from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
import remove from 'lodash/remove';
|
||||
import config from '@/config';
|
||||
import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers';
|
||||
|
@ -19,6 +19,7 @@ import {
|
|||
} from '../EventMessageClasses/EventMessageConfirm';
|
||||
import { once as eventOnce } from 'events';
|
||||
import { inTest } from '@/constants';
|
||||
import { Logger } from '@/Logger';
|
||||
import Container from 'typedi';
|
||||
|
||||
interface MessageEventBusLogWriterConstructorOptions {
|
||||
|
@ -48,8 +49,14 @@ export class MessageEventBusLogWriter {
|
|||
|
||||
static options: Required<MessageEventBusLogWriterOptions>;
|
||||
|
||||
private readonly logger: Logger;
|
||||
|
||||
private _worker: Worker | undefined;
|
||||
|
||||
constructor() {
|
||||
this.logger = Container.get(Logger);
|
||||
}
|
||||
|
||||
public get worker(): Worker | undefined {
|
||||
return this._worker;
|
||||
}
|
||||
|
@ -136,7 +143,7 @@ export class MessageEventBusLogWriter {
|
|||
this._worker = new Worker(workerFileName);
|
||||
if (this.worker) {
|
||||
this.worker.on('messageerror', async (error) => {
|
||||
LoggerProxy.error('Event Bus Log Writer thread error, attempting to restart...', error);
|
||||
this.logger.error('Event Bus Log Writer thread error, attempting to restart...', error);
|
||||
await MessageEventBusLogWriter.instance.startThread();
|
||||
});
|
||||
return true;
|
||||
|
@ -235,7 +242,7 @@ export class MessageEventBusLogWriter {
|
|||
}
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.error(
|
||||
this.logger.error(
|
||||
`Error reading line messages from file: ${logFileName}, line: ${line}, ${error.message}}`,
|
||||
);
|
||||
}
|
||||
|
@ -243,7 +250,7 @@ export class MessageEventBusLogWriter {
|
|||
// wait for stream to finish before continue
|
||||
await eventOnce(rl, 'close');
|
||||
} catch {
|
||||
LoggerProxy.error(`Error reading logged messages from file: ${logFileName}`);
|
||||
this.logger.error(`Error reading logged messages from file: ${logFileName}`);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
|
@ -308,7 +315,7 @@ export class MessageEventBusLogWriter {
|
|||
if (msg !== null) messages.push(msg);
|
||||
}
|
||||
} catch {
|
||||
LoggerProxy.error(
|
||||
this.logger.error(
|
||||
`Error reading line messages from file: ${logFileName}, line: ${line}`,
|
||||
);
|
||||
}
|
||||
|
@ -316,7 +323,7 @@ export class MessageEventBusLogWriter {
|
|||
// wait for stream to finish before continue
|
||||
await eventOnce(rl, 'close');
|
||||
} catch {
|
||||
LoggerProxy.error(`Error reading logged messages from file: ${logFileName}`);
|
||||
this.logger.error(`Error reading logged messages from file: ${logFileName}`);
|
||||
}
|
||||
}
|
||||
return messages;
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import { Container } from 'typedi';
|
||||
import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow';
|
||||
import type { ExecutionPayload, IExecutionDb } from '@/Interfaces';
|
||||
import pick from 'lodash/pick';
|
||||
import { isWorkflowIdValid } from '@/utils';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import Container from 'typedi';
|
||||
import { ExecutionRepository } from '../../databases/repositories';
|
||||
import { ExecutionMetadataService } from '../../services/executionMetadata.service';
|
||||
import { ExecutionRepository } from '@db/repositories';
|
||||
import { ExecutionMetadataService } from '@/services/executionMetadata.service';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export function determineFinalExecutionStatus(runData: IRun): ExecutionStatus {
|
||||
const workflowHasCrashed = runData.status === 'crashed';
|
||||
|
@ -69,9 +69,10 @@ export async function updateExistingExecution(parameters: {
|
|||
workflowId: string;
|
||||
executionData: Partial<IExecutionDb>;
|
||||
}) {
|
||||
const logger = Container.get(Logger);
|
||||
const { executionId, workflowId, executionData } = parameters;
|
||||
// Leave log message before flatten as that operation increased memory usage a lot and the chance of a crash is highest here
|
||||
LoggerProxy.debug(`Save execution data to database for execution ID ${executionId}`, {
|
||||
logger.debug(`Save execution data to database for execution ID ${executionId}`, {
|
||||
executionId,
|
||||
workflowId,
|
||||
finished: executionData.finished,
|
||||
|
@ -88,7 +89,7 @@ export async function updateExistingExecution(parameters: {
|
|||
);
|
||||
}
|
||||
} catch (e) {
|
||||
LoggerProxy.error(`Failed to save metadata for execution ID ${executionId}`, e as Error);
|
||||
logger.error(`Failed to save metadata for execution ID ${executionId}`, e as Error);
|
||||
}
|
||||
|
||||
if (executionData.finished === true && executionData.retryOf !== undefined) {
|
||||
|
|
|
@ -1,30 +1,15 @@
|
|||
import express from 'express';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import type {
|
||||
IExecutionFlattedResponse,
|
||||
IExecutionResponse,
|
||||
IExecutionsListResponse,
|
||||
} from '@/Interfaces';
|
||||
import * as ResponseHelper from '@/ResponseHelper';
|
||||
import { getLogger } from '@/Logger';
|
||||
import type { ExecutionRequest } from '@/requests';
|
||||
import { EEExecutionsController } from './executions.controller.ee';
|
||||
import { ExecutionsService } from './executions.service';
|
||||
|
||||
export const executionsController = express.Router();
|
||||
|
||||
/**
|
||||
* Initialise Logger if needed
|
||||
*/
|
||||
executionsController.use((req, res, next) => {
|
||||
try {
|
||||
LoggerProxy.getInstance();
|
||||
} catch (error) {
|
||||
LoggerProxy.init(getLogger());
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
executionsController.use('/', EEExecutionsController);
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { validate as jsonSchemaValidate } from 'jsonschema';
|
||||
import type { IWorkflowBase, JsonObject, ExecutionStatus } from 'n8n-workflow';
|
||||
import { LoggerProxy, jsonParse, Workflow } from 'n8n-workflow';
|
||||
import { jsonParse, Workflow } from 'n8n-workflow';
|
||||
import type { FindOperator } from 'typeorm';
|
||||
import { In } from 'typeorm';
|
||||
import { ActiveExecutions } from '@/ActiveExecutions';
|
||||
|
@ -23,6 +23,7 @@ import * as GenericHelpers from '@/GenericHelpers';
|
|||
import { Container } from 'typedi';
|
||||
import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers';
|
||||
import { ExecutionRepository } from '@db/repositories';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export interface IGetExecutionsQueryFilter {
|
||||
id?: FindOperator<string> | string;
|
||||
|
@ -110,7 +111,7 @@ export class ExecutionsService {
|
|||
}
|
||||
}
|
||||
} catch (error) {
|
||||
LoggerProxy.error('Failed to parse filter', {
|
||||
Container.get(Logger).error('Failed to parse filter', {
|
||||
userId: req.user.id,
|
||||
filter: req.query.filter,
|
||||
});
|
||||
|
@ -123,7 +124,7 @@ export class ExecutionsService {
|
|||
// safeguard against querying workflowIds not shared with the user
|
||||
const workflowId = filter?.workflowId?.toString();
|
||||
if (workflowId !== undefined && !sharedWorkflowIds.includes(workflowId)) {
|
||||
LoggerProxy.verbose(
|
||||
Container.get(Logger).verbose(
|
||||
`User ${req.user.id} attempted to query non-shared workflow ${workflowId}`,
|
||||
);
|
||||
return {
|
||||
|
@ -193,10 +194,13 @@ export class ExecutionsService {
|
|||
});
|
||||
|
||||
if (!execution) {
|
||||
LoggerProxy.info('Attempt to read execution was blocked due to insufficient permissions', {
|
||||
userId: req.user.id,
|
||||
executionId,
|
||||
});
|
||||
Container.get(Logger).info(
|
||||
'Attempt to read execution was blocked due to insufficient permissions',
|
||||
{
|
||||
userId: req.user.id,
|
||||
executionId,
|
||||
},
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
@ -221,7 +225,7 @@ export class ExecutionsService {
|
|||
});
|
||||
|
||||
if (!execution) {
|
||||
LoggerProxy.info(
|
||||
Container.get(Logger).info(
|
||||
'Attempt to retry an execution was blocked due to insufficient permissions',
|
||||
{
|
||||
userId: req.user.id,
|
||||
|
@ -299,11 +303,14 @@ export class ExecutionsService {
|
|||
// Find the data of the last executed node in the new workflow
|
||||
const node = workflowInstance.getNode(stack.node.name);
|
||||
if (node === null) {
|
||||
LoggerProxy.error('Failed to retry an execution because a node could not be found', {
|
||||
userId: req.user.id,
|
||||
executionId,
|
||||
nodeName: stack.node.name,
|
||||
});
|
||||
Container.get(Logger).error(
|
||||
'Failed to retry an execution because a node could not be found',
|
||||
{
|
||||
userId: req.user.id,
|
||||
executionId,
|
||||
nodeName: stack.node.name,
|
||||
},
|
||||
);
|
||||
throw new Error(
|
||||
`Could not find the node "${stack.node.name}" in workflow. It probably got deleted or renamed. Without it the workflow can sadly not be retried.`,
|
||||
);
|
||||
|
|
|
@ -1,38 +1,25 @@
|
|||
import express from 'express';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
import { Container } from 'typedi';
|
||||
|
||||
import { getLogger } from '@/Logger';
|
||||
import { Logger } from '@/Logger';
|
||||
import * as ResponseHelper from '@/ResponseHelper';
|
||||
import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces';
|
||||
import { LicenseService } from './License.service';
|
||||
import { License } from '@/License';
|
||||
import type { AuthenticatedRequest, LicenseRequest } from '@/requests';
|
||||
import { Container } from 'typedi';
|
||||
import { InternalHooks } from '@/InternalHooks';
|
||||
|
||||
export const licenseController = express.Router();
|
||||
|
||||
const OWNER_ROUTES = ['/activate', '/renew'];
|
||||
|
||||
/**
|
||||
* Initialize Logger if needed
|
||||
*/
|
||||
licenseController.use((req, res, next) => {
|
||||
try {
|
||||
LoggerProxy.getInstance();
|
||||
} catch (error) {
|
||||
LoggerProxy.init(getLogger());
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
/**
|
||||
* Owner checking
|
||||
*/
|
||||
licenseController.use((req: AuthenticatedRequest, res, next) => {
|
||||
if (OWNER_ROUTES.includes(req.path) && req.user) {
|
||||
if (!req.user.isOwner) {
|
||||
LoggerProxy.info('Non-owner attempted to activate or renew a license', {
|
||||
Container.get(Logger).info('Non-owner attempted to activate or renew a license', {
|
||||
userId: req.user.id,
|
||||
});
|
||||
ResponseHelper.sendErrorResponse(
|
||||
|
@ -95,7 +82,7 @@ licenseController.post(
|
|||
break;
|
||||
default:
|
||||
message += `: ${error.message}`;
|
||||
getLogger().error(message, { stack: error.stack ?? 'n/a' });
|
||||
Container.get(Logger).error(message, { stack: error.stack ?? 'n/a' });
|
||||
}
|
||||
|
||||
throw new ResponseHelper.BadRequestError(message);
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
import type { Application, NextFunction, Request, RequestHandler, Response } from 'express';
|
||||
import { Container } from 'typedi';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import passport from 'passport';
|
||||
import { Strategy } from 'passport-jwt';
|
||||
import { sync as globSync } from 'fast-glob';
|
||||
import { LoggerProxy as Logger } from 'n8n-workflow';
|
||||
import type { JwtPayload } from '@/Interfaces';
|
||||
import type { AuthenticatedRequest } from '@/requests';
|
||||
import config from '@/config';
|
||||
import { AUTH_COOKIE_NAME, EDITOR_UI_DIST_DIR } from '@/constants';
|
||||
import { issueCookie, resolveJwtContent } from '@/auth/jwt';
|
||||
import { canSkipAuth } from '@/decorators/registerController';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
const jwtFromRequest = (req: Request) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
|
@ -27,7 +28,7 @@ const userManagementJwtAuth = (): RequestHandler => {
|
|||
const user = await resolveJwtContent(jwtPayload);
|
||||
return done(null, user);
|
||||
} catch (error) {
|
||||
Logger.debug('Failed to extract user from JWT payload', { jwtPayload });
|
||||
Container.get(Logger).debug('Failed to extract user from JWT payload', { jwtPayload });
|
||||
return done(null, false, { message: 'User not found' });
|
||||
}
|
||||
},
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { jsonStringify, LoggerProxy as Logger } from 'n8n-workflow';
|
||||
import { jsonStringify } from 'n8n-workflow';
|
||||
import type { IPushDataType } from '@/Interfaces';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
export abstract class AbstractPush<T> {
|
||||
protected connections: Record<string, T> = {};
|
||||
|
@ -7,9 +8,11 @@ export abstract class AbstractPush<T> {
|
|||
protected abstract close(connection: T): void;
|
||||
protected abstract sendToOne(connection: T, data: string): void;
|
||||
|
||||
constructor(private readonly logger: Logger) {}
|
||||
|
||||
protected add(sessionId: string, connection: T): void {
|
||||
const { connections } = this;
|
||||
Logger.debug('Add editor-UI session', { sessionId });
|
||||
this.logger.debug('Add editor-UI session', { sessionId });
|
||||
|
||||
const existingConnection = connections[sessionId];
|
||||
if (existingConnection) {
|
||||
|
@ -22,7 +25,7 @@ export abstract class AbstractPush<T> {
|
|||
|
||||
protected remove(sessionId?: string): void {
|
||||
if (sessionId !== undefined) {
|
||||
Logger.debug('Remove editor-UI session', { sessionId });
|
||||
this.logger.debug('Remove editor-UI session', { sessionId });
|
||||
delete this.connections[sessionId];
|
||||
}
|
||||
}
|
||||
|
@ -30,11 +33,11 @@ export abstract class AbstractPush<T> {
|
|||
send<D>(type: IPushDataType, data: D, sessionId: string | undefined) {
|
||||
const { connections } = this;
|
||||
if (sessionId !== undefined && connections[sessionId] === undefined) {
|
||||
Logger.error(`The session "${sessionId}" is not registered.`, { sessionId });
|
||||
this.logger.error(`The session "${sessionId}" is not registered.`, { sessionId });
|
||||
return;
|
||||
}
|
||||
|
||||
Logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId });
|
||||
this.logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId });
|
||||
|
||||
const sendData = jsonStringify({ type, data }, { replaceCircularRefs: true });
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ const useWebSockets = config.getEnv('push.backend') === 'websocket';
|
|||
|
||||
@Service()
|
||||
export class Push extends EventEmitter {
|
||||
private backend = useWebSockets ? new WebSocketPush() : new SSEPush();
|
||||
private backend = useWebSockets ? Container.get(WebSocketPush) : Container.get(SSEPush);
|
||||
|
||||
handleRequest(req: SSEPushRequest | WebSocketPushRequest, res: PushResponse) {
|
||||
if (req.ws) {
|
||||
|
|
|
@ -1,16 +1,19 @@
|
|||
import SSEChannel from 'sse-channel';
|
||||
import { Service } from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
import { AbstractPush } from './abstract.push';
|
||||
import type { PushRequest, PushResponse } from './types';
|
||||
|
||||
type Connection = { req: PushRequest; res: PushResponse };
|
||||
|
||||
@Service()
|
||||
export class SSEPush extends AbstractPush<Connection> {
|
||||
readonly channel = new SSEChannel();
|
||||
|
||||
readonly connections: Record<string, Connection> = {};
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
constructor(logger: Logger) {
|
||||
super(logger);
|
||||
this.channel.on('disconnect', (channel, { req }) => {
|
||||
this.remove(req?.query?.sessionId);
|
||||
});
|
||||
|
|
|
@ -1,13 +1,16 @@
|
|||
import type WebSocket from 'ws';
|
||||
import { Service } from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
import { AbstractPush } from './abstract.push';
|
||||
|
||||
function heartbeat(this: WebSocket) {
|
||||
this.isAlive = true;
|
||||
}
|
||||
|
||||
@Service()
|
||||
export class WebSocketPush extends AbstractPush<WebSocket> {
|
||||
constructor() {
|
||||
super();
|
||||
constructor(logger: Logger) {
|
||||
super(logger);
|
||||
|
||||
// Ping all connected clients every 60 seconds
|
||||
setInterval(() => this.pingAll(), 60 * 1000);
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import { exec } from 'child_process';
|
||||
import { access as fsAccess, mkdir as fsMkdir } from 'fs/promises';
|
||||
|
||||
import { Service } from 'typedi';
|
||||
import { promisify } from 'util';
|
||||
import axios from 'axios';
|
||||
|
||||
import { LoggerProxy as Logger } from 'n8n-workflow';
|
||||
import type { PublicInstalledPackage } from 'n8n-workflow';
|
||||
import { InstanceSettings } from 'n8n-core';
|
||||
import type { PackageDirectoryLoader } from 'n8n-core';
|
||||
|
@ -21,6 +21,7 @@ import {
|
|||
} from '@/constants';
|
||||
import type { CommunityPackages } from '@/Interfaces';
|
||||
import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
|
||||
import { Logger } from '@/Logger';
|
||||
|
||||
const {
|
||||
PACKAGE_NAME_NOT_PROVIDED,
|
||||
|
@ -48,6 +49,7 @@ export class CommunityPackagesService {
|
|||
|
||||
constructor(
|
||||
private readonly instanceSettings: InstanceSettings,
|
||||
private readonly logger: Logger,
private readonly installedPackageRepository: InstalledPackagesRepository,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
) {}

@ -81,7 +83,7 @@ export class CommunityPackagesService {
} catch (maybeError) {
const error = toError(maybeError);
Logger.error('Failed to save installed packages and nodes', {
this.logger.error('Failed to save installed packages and nodes', {
error,
packageName: packageLoader.packageJson.name,
});

@ -156,7 +158,7 @@ export class CommunityPackagesService {
if (errorMessage.includes(npmMessage)) throw new Error(n8nMessage);
});
Logger.warn('npm command failed', { errorMessage });
this.logger.warn('npm command failed', { errorMessage });
throw new Error(PACKAGE_FAILED_TO_INSTALL);
}

@ -269,12 +271,12 @@ export class CommunityPackagesService {
if (missingPackages.size === 0) return;
Logger.error(
this.logger.error(
'n8n detected that some packages are missing. For more information, visit https://docs.n8n.io/integrations/community-nodes/troubleshooting/',
);
if (reinstallMissingPackages || process.env.N8N_REINSTALL_MISSING_PACKAGES) {
Logger.info('Attempting to reinstall missing packages', { missingPackages });
this.logger.info('Attempting to reinstall missing packages', { missingPackages });
try {
// Optimistic approach - stop if any installation fails

@ -283,9 +285,9 @@ export class CommunityPackagesService {
missingPackages.delete(missingPackage);
}
Logger.info('Packages reinstalled successfully. Resuming regular initialization.');
this.logger.info('Packages reinstalled successfully. Resuming regular initialization.');
} catch (error) {
Logger.error('n8n was unable to install the missing packages.');
this.logger.error('n8n was unable to install the missing packages.');
}
}

@ -1,17 +1,18 @@
import { EventEmitter } from 'events';
import Container, { Service } from 'typedi';
import { Container, Service } from 'typedi';
import type { INode, IRun, IWorkflowBase } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { StatisticsNames } from '@db/entities/WorkflowStatistics';
import { WorkflowStatisticsRepository } from '@db/repositories';
import { UserService } from '@/services/user.service';
import { Logger } from '@/Logger';
import { OwnershipService } from './ownership.service';

@Service()
export class EventsService extends EventEmitter {
constructor(
private repository: WorkflowStatisticsRepository,
private ownershipService: OwnershipService,
private readonly logger: Logger,
private readonly repository: WorkflowStatisticsRepository,
private readonly ownershipService: OwnershipService,
) {
super({ captureRejections: true });
if ('SKIP_STATISTICS_EVENTS' in process.env) return;

@ -43,7 +44,7 @@ export class EventsService extends EventEmitter {
try {
const upsertResult = await this.repository.upsertWorkflowStatistics(name, workflowId);
if (name === 'production_success' && upsertResult === 'insert') {
if (name === StatisticsNames.productionSuccess && upsertResult === 'insert') {
const owner = await Container.get(OwnershipService).getWorkflowOwnerCached(workflowId);
const metrics = {
user_id: owner.id,

@ -61,7 +62,7 @@ export class EventsService extends EventEmitter {
this.emit('telemetry.onFirstProductionWorkflowSuccess', metrics);
}
} catch (error) {
LoggerProxy.verbose('Unable to fire first workflow success telemetry event');
this.logger.verbose('Unable to fire first workflow success telemetry event');
}
}

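The changes above follow the recurring pattern of this commit for classes: a service stops calling the static LoggerProxy and instead receives the Logger through typedi constructor injection. As a rough illustration only (the ExampleService name and log message are made up, not part of the diff), the pattern looks like this:

import { Service } from 'typedi';
import { Logger } from '@/Logger';

// Hypothetical service, not part of this commit: the Logger instance comes
// from the typedi container instead of the LoggerProxy singleton.
@Service()
export class ExampleService {
	constructor(private readonly logger: Logger) {}

	doWork() {
		// Instance method call replaces the former static LoggerProxy.debug(...)
		this.logger.debug('ExampleService is doing work');
	}
}
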
@ -19,7 +19,6 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { License } from '@/License';
import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
import * as WebhookHelpers from '@/WebhookHelpers';
import { LoggerProxy } from 'n8n-workflow';
import config from '@/config';
import { getCurrentAuthenticationMethod } from '@/sso/ssoHelpers';
import { getLdapLoginLabel } from '@/Ldap/helpers';

@ -31,6 +30,7 @@ import {
} from '@/workflows/workflowHistory/workflowHistoryHelper.ee';
import { UserManagementMailer } from '@/UserManagement/email';
import type { CommunityPackagesService } from '@/services/communityPackages.service';
import { Logger } from '@/Logger';

@Service()
export class FrontendService {

@ -39,6 +39,7 @@ export class FrontendService {
private communityPackagesService?: CommunityPackagesService;

constructor(
private readonly logger: Logger,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
private readonly credentialTypes: CredentialTypes,
private readonly credentialsOverwrites: CredentialsOverwrites,

@ -72,7 +73,7 @@ export class FrontendService {
const [key, url] = conf.split(';');

if (!key || !url) {
LoggerProxy.warn('Diagnostics frontend config is invalid');
this.logger.warn('Diagnostics frontend config is invalid');
telemetrySettings.enabled = false;
}

@ -6,7 +6,6 @@ import promClient, { type Counter } from 'prom-client';
import semverParse from 'semver/functions/parse';
import { Service } from 'typedi';
import EventEmitter from 'events';
import { LoggerProxy } from 'n8n-workflow';

import { CacheService } from '@/services/cache.service';
import type { EventMessageTypes } from '@/eventbus/EventMessageClasses';

@ -15,10 +14,14 @@ import {
getLabelsForEvent,
} from '@/eventbus/MessageEventBusDestination/Helpers.ee';
import { eventBus } from '@/eventbus';
import { Logger } from '@/Logger';

@Service()
export class MetricsService extends EventEmitter {
constructor(private readonly cacheService: CacheService) {
constructor(
private readonly logger: Logger,
private readonly cacheService: CacheService,
) {
super();
}

@ -130,7 +133,7 @@ export class MetricsService extends EventEmitter {
prefix + event.eventName.replace('n8n.', '').replace(/\./g, '_') + '_total';

if (!promClient.validateMetricName(metricName)) {
LoggerProxy.debug(`Invalid metric name: ${metricName}. Ignoring it!`);
this.logger.debug(`Invalid metric name: ${metricName}. Ignoring it!`);
this.counters[event.eventName] = null;
return null;
}

@ -1,4 +1,6 @@
import { LoggerProxy, jsonParse } from 'n8n-workflow';
import { Container } from 'typedi';
import { jsonParse } from 'n8n-workflow';
import { Logger } from '@/Logger';
import type { RedisServiceCommandObject } from '../redis/RedisServiceCommands';
import { COMMAND_REDIS_CHANNEL } from '../redis/RedisServiceHelper';
import * as os from 'os';

@ -13,7 +15,7 @@ export function messageToRedisServiceCommandObject(messageString: string) {
try {
message = jsonParse<RedisServiceCommandObject>(messageString);
} catch {
LoggerProxy.debug(
Container.get(Logger).debug(
`Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`,
);
return;

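Free functions such as messageToRedisServiceCommandObject have no constructor to inject into, so the commit resolves the logger from the typedi container at the call site instead. A minimal sketch of that second pattern, assuming a hypothetical standalone helper (the function name and message are illustrative only):

import { Container } from 'typedi';
import { Logger } from '@/Logger';

// Hypothetical helper, not part of this commit: outside a @Service() class there is
// no constructor injection, so the Logger instance is fetched from the container.
export function reportInvalidPayload(payload: string) {
	Container.get(Logger).debug(`Ignoring invalid payload: "${payload}"`);
}
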
@ -1,18 +1,19 @@
import { LoggerProxy } from 'n8n-workflow';
import { Container } from 'typedi';
import { debounceMessageReceiver, messageToRedisServiceCommandObject } from '../helpers';
import config from '@/config';
import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';
import Container from 'typedi';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { License } from '@/License';
import { Logger } from '@/Logger';

export async function handleCommandMessageMain(messageString: string) {
const queueModeId = config.get('redis.queueModeId');
const isMainInstance = config.get('generic.instanceType') === 'main';
const message = messageToRedisServiceCommandObject(messageString);
const logger = Container.get(Logger);

if (message) {
LoggerProxy.debug(
logger.debug(
`RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`,
);
if (

@ -20,7 +21,7 @@ export async function handleCommandMessageMain(messageString: string) {
(message.targets && !message.targets.includes(queueModeId))
) {
// Skipping command message because it's not for this instance
LoggerProxy.debug(
logger.debug(
`Skipping command message ${message.command} because it's not for this instance.`,
);
return message;

@ -35,7 +36,7 @@ export async function handleCommandMessageMain(messageString: string) {
}
if (isMainInstance) {
// at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently
LoggerProxy.error(
logger.error(
'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.',
);
return message;

@ -1,11 +1,13 @@
import { jsonParse, LoggerProxy } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import Container from 'typedi';
import { Logger } from '@/Logger';
import type { RedisServiceWorkerResponseObject } from '../../redis/RedisServiceCommands';

export async function handleWorkerResponseMessageMain(messageString: string) {
const workerResponse = jsonParse<RedisServiceWorkerResponseObject>(messageString);
if (workerResponse) {
// TODO: Handle worker response
LoggerProxy.debug(
Container.get(Logger).debug(
`Received worker response ${workerResponse.command} from ${workerResponse.workerId}`,
);
}

@ -1,29 +1,31 @@
import { jsonParse, LoggerProxy } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import Container from 'typedi';
import type { RedisServiceCommandObject } from '@/services/redis/RedisServiceCommands';
import { COMMAND_REDIS_CHANNEL } from '@/services/redis/RedisServiceHelper';
import * as os from 'os';
import Container from 'typedi';
import { License } from '@/License';
import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { debounceMessageReceiver, getOsCpuString } from '../helpers';
import type { WorkerCommandReceivedHandlerOptions } from './types';
import { Logger } from '@/Logger';

export function getWorkerCommandReceivedHandler(options: WorkerCommandReceivedHandlerOptions) {
return async (channel: string, messageString: string) => {
if (channel === COMMAND_REDIS_CHANNEL) {
if (!messageString) return;
const logger = Container.get(Logger);
let message: RedisServiceCommandObject;
try {
message = jsonParse<RedisServiceCommandObject>(messageString);
} catch {
LoggerProxy.debug(
logger.debug(
`Received invalid message via channel ${COMMAND_REDIS_CHANNEL}: "${messageString}"`,
);
return;
}
if (message) {
LoggerProxy.debug(
logger.debug(
`RedisCommandHandler(worker): Received command message ${message.command} from ${message.senderId}`,
);
if (message.targets && !message.targets.includes(options.queueModeId)) {

@ -115,7 +117,7 @@ export function getWorkerCommandReceivedHandler(options: WorkerCommandReceivedHa
// await this.stopProcess();
break;
default:
LoggerProxy.debug(
logger.debug(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Received unknown command via channel ${COMMAND_REDIS_CHANNEL}: "${message.command}"`,
);

@ -1,8 +1,9 @@
import type Redis from 'ioredis';
import type { Cluster } from 'ioredis';
import { getDefaultRedisClient } from './RedisServiceHelper';
import { LoggerProxy } from 'n8n-workflow';
import { Service } from 'typedi';
import config from '@/config';
import { Logger } from '@/Logger';
import { getDefaultRedisClient } from './RedisServiceHelper';

export type RedisClientType =
| 'subscriber'

@ -22,11 +23,14 @@ export type RedisServiceMessageHandler =
| ((channel: string, message: string) => void)
| ((stream: string, id: string, message: string[]) => void);

@Service()
class RedisServiceBase {
redisClient: Redis | Cluster | undefined;

isInitialized = false;

constructor(protected readonly logger: Logger) {}

async init(type: RedisClientType = 'client'): Promise<void> {
if (this.redisClient && this.isInitialized) {
return;

@ -34,13 +38,13 @@ class RedisServiceBase {
this.redisClient = await getDefaultRedisClient(undefined, type);

this.redisClient.on('close', () => {
LoggerProxy.warn('Redis unavailable - trying to reconnect...');
this.logger.warn('Redis unavailable - trying to reconnect...');
});

this.redisClient.on('error', (error) => {
if (!String(error).includes('ECONNREFUSED')) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn('Error with Redis: ', error);
this.logger.warn('Error with Redis: ', error);
}
});
}

@ -1,8 +1,9 @@
import type Redis from 'ioredis';
import type { Cluster, RedisOptions } from 'ioredis';
import config from '@/config';
import { LoggerProxy } from 'n8n-workflow';
import type { RedisClientType } from './RedisServiceBaseClasses';
import Container from 'typedi';
import { Logger } from '@/Logger';

export const EVENT_BUS_REDIS_STREAM = 'n8n:eventstream';
export const COMMAND_REDIS_STREAM = 'n8n:commandstream';

@ -56,7 +57,9 @@ export function getRedisStandardClient(
maxRetriesPerRequest: null,
};
if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {};
LoggerProxy.debug(

const logger = Container.get(Logger);
logger.debug(
`Initialising Redis client${redisType ? ` of type ${redisType}` : ''} connection with host: ${
host ?? 'localhost'
} and port: ${port ?? '6379'}`,

@ -73,7 +76,7 @@ export function getRedisStandardClient(
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
LoggerProxy.error(
logger.error(
`Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`,
);
process.exit(1);

@ -103,7 +106,9 @@ export function getRedisClusterClient(
maxRetriesPerRequest: null,
};
if (config.getEnv('queue.bull.redis.tls')) sharedRedisOptions.tls = {};
LoggerProxy.debug(

const logger = Container.get(Logger);
logger.debug(
`Initialising Redis cluster${
redisType ? ` of type ${redisType}` : ''
} connection with nodes: ${clusterNodes.map((e) => `${e.host}:${e.port}`).join(',')}`,

@ -122,7 +127,7 @@ export function getRedisClusterClient(
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
LoggerProxy.error(
logger.error(
`Unable to connect to Redis after ${redisConnectionTimeoutLimit}. Exiting process.`,
);
process.exit(1);

@ -1,8 +1,8 @@
import { Service } from 'typedi';
import { jsonParse } from 'n8n-workflow';
import { WORKER_RESPONSE_REDIS_LIST } from './RedisServiceHelper';
import type { RedisServiceWorkerResponseObject } from './RedisServiceCommands';
import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses';
import { LoggerProxy, jsonParse } from 'n8n-workflow';

@Service()
export class RedisServiceListReceiver extends RedisServiceBaseReceiver {

@ -37,7 +37,7 @@ export class RedisServiceListReceiver extends RedisServiceBaseReceiver {
}
return workerResponse;
} catch (error) {
LoggerProxy.warn(
this.logger.warn(
`Error parsing worker response on list ${list}: ${(error as Error).message}`,
);
}

@ -1,5 +1,4 @@
import { Service } from 'typedi';
import { LoggerProxy as Logger } from 'n8n-workflow';
import {
COMMAND_REDIS_CHANNEL,
EVENT_BUS_REDIS_CHANNEL,

@ -25,9 +24,9 @@ export class RedisServicePubSubSubscriber extends RedisServiceBaseReceiver {
}
await this.redisClient?.subscribe(channel, (error, _count: number) => {
if (error) {
Logger.error(`Error subscribing to channel ${channel}`);
this.logger.error(`Error subscribing to channel ${channel}`);
} else {
Logger.debug(`Subscribed Redis PubSub client to channel: ${channel}`);
this.logger.debug(`Subscribed Redis PubSub client to channel: ${channel}`);
}
});
}

@ -38,9 +37,9 @@ export class RedisServicePubSubSubscriber extends RedisServiceBaseReceiver {
}
await this.redisClient?.unsubscribe(channel, (error, _count: number) => {
if (error) {
Logger.error(`Error unsubscribing from channel ${channel}`);
this.logger.error(`Error unsubscribing from channel ${channel}`);
} else {
Logger.debug(`Unsubscribed Redis PubSub client from channel: ${channel}`);
this.logger.debug(`Unsubscribed Redis PubSub client from channel: ${channel}`);
}
});
}

@ -1,5 +1,4 @@
import { Service } from 'typedi';
import { LoggerProxy } from 'n8n-workflow';
import { RedisServiceBaseReceiver } from './RedisServiceBaseClasses';

type LastId = string;

@ -26,7 +25,7 @@ export class RedisServiceStreamConsumer extends RedisServiceBaseReceiver {
if (!this.redisClient) {
await this.init();
}
LoggerProxy.debug(`Redis client now listening to stream ${stream} starting with id ${lastId}`);
this.logger.debug(`Redis client now listening to stream ${stream} starting with id ${lastId}`);
this.setLastId(stream, lastId);
const interval = this.streams.get(stream)?.pollingInterval ?? 1000;
const waiter = setInterval(async () => {

@ -54,7 +53,7 @@ export class RedisServiceStreamConsumer extends RedisServiceBaseReceiver {
}

stopListeningToStream(stream: StreamName): void {
LoggerProxy.debug(`Redis client stopped listening to stream ${stream}`);
this.logger.debug(`Redis client stopped listening to stream ${stream}`);
const existing = this.streams.get(stream);
if (existing?.waiter) {
clearInterval(existing.waiter);

@ -2,7 +2,7 @@ import type express from 'express';
import { Service } from 'typedi';
import * as Db from '@/Db';
import type { User } from '@db/entities/User';
import { jsonParse, LoggerProxy } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import { AuthError, BadRequestError } from '@/ResponseHelper';
import { getServiceProviderInstance } from './serviceProvider.ee';
import type { SamlUserAttributes } from './types/samlUserAttributes';

@ -27,6 +27,7 @@ import https from 'https';
import type { SamlLoginBinding } from './types';
import { validateMetadata, validateResponse } from './samlValidator';
import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper';
import { Logger } from '@/Logger';

@Service()
export class SamlService {

@ -70,6 +71,8 @@ export class SamlService {
};
}

constructor(private readonly logger: Logger) {}

async init(): Promise<void> {
// load preferences first but do not apply so as to not load samlify unnecessarily
await this.loadFromDbAndApplySamlPreferences(false);

@ -81,7 +84,7 @@ export class SamlService {

async loadSamlify() {
if (this.samlify === undefined) {
LoggerProxy.debug('Loading samlify library into memory');
this.logger.debug('Loading samlify library into memory');
this.samlify = await import('samlify');
}
this.samlify.setSchemaValidator({

@ -1,5 +1,6 @@
import { LoggerProxy } from 'n8n-workflow';
import { Container } from 'typedi';
import type { XMLFileInfo } from 'xmllint-wasm';
import { Logger } from '@/Logger';

let xml: XMLFileInfo;
let xmldsigCore: XMLFileInfo;

@ -14,7 +15,7 @@ let xmllintWasm: typeof import('xmllint-wasm') | undefined;
// dynamically load schema files
async function loadSchemas(): Promise<void> {
if (!xml || xml.contents === '') {
LoggerProxy.debug('Loading XML schema files for SAML validation into memory');
Container.get(Logger).debug('Loading XML schema files for SAML validation into memory');
const f = await import('./schema/xml.xsd');
xml = {
fileName: 'xml.xsd',

@ -61,12 +62,13 @@ async function loadSchemas(): Promise<void> {
// dynamically load xmllint-wasm
async function loadXmllintWasm(): Promise<void> {
if (xmllintWasm === undefined) {
LoggerProxy.debug('Loading xmllint-wasm library into memory');
Container.get(Logger).debug('Loading xmllint-wasm library into memory');
xmllintWasm = await import('xmllint-wasm');
}
}

export async function validateMetadata(metadata: string): Promise<boolean> {
const logger = Container.get(Logger);
try {
await loadXmllintWasm();
await loadSchemas();

@ -82,20 +84,21 @@ export async function validateMetadata(metadata: string): Promise<boolean> {
preload: [xmlProtocol, xmlAssertion, xmldsigCore, xmlXenc, xml],
});
if (validationResult?.valid) {
LoggerProxy.debug('SAML Metadata is valid');
logger.debug('SAML Metadata is valid');
return true;
} else {
LoggerProxy.warn('SAML Validate Metadata: Invalid metadata');
LoggerProxy.warn(validationResult ? validationResult.errors.join('\n') : '');
logger.warn('SAML Validate Metadata: Invalid metadata');
logger.warn(validationResult ? validationResult.errors.join('\n') : '');
}
} catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn(error);
logger.warn(error);
}
return false;
}

export async function validateResponse(response: string): Promise<boolean> {
const logger = Container.get(Logger);
try {
await loadXmllintWasm();
await loadSchemas();

@ -111,15 +114,15 @@ export async function validateResponse(response: string): Promise<boolean> {
preload: [xmlMetadata, xmlAssertion, xmldsigCore, xmlXenc, xml],
});
if (validationResult?.valid) {
LoggerProxy.debug('SAML Response is valid');
logger.debug('SAML Response is valid');
return true;
} else {
LoggerProxy.warn('SAML Validate Response: Failed');
LoggerProxy.warn(validationResult ? validationResult.errors.join('\n') : '');
logger.warn('SAML Validate Response: Failed');
logger.warn(validationResult ? validationResult.errors.join('\n') : '');
}
} catch (error) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
LoggerProxy.warn(error);
logger.warn(error);
}
return false;
}

@ -1,14 +1,13 @@
import type RudderStack from '@rudderstack/rudder-sdk-node';
import { PostHogClient } from '@/posthog';
import { Container, Service } from 'typedi';
import type { ITelemetryTrackProperties } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import config from '@/config';
import type { IExecutionTrackProperties } from '@/Interfaces';
import { getLogger } from '@/Logger';
import { Logger } from '@/Logger';
import { License } from '@/License';
import { LicenseService } from '@/license/License.service';
import { N8N_VERSION } from '@/constants';
import Container, { Service } from 'typedi';
import { SourceControlPreferencesService } from '../environments/sourceControl/sourceControlPreferences.service.ee';
import { InstanceSettings } from 'n8n-core';

@ -38,6 +37,7 @@ export class Telemetry {
private executionCountsBuffer: IExecutionsBuffer = {};

constructor(
private readonly logger: Logger,
private postHog: PostHogClient,
private license: License,
private readonly instanceSettings: InstanceSettings,

@ -50,9 +50,7 @@ export class Telemetry {
const [key, url] = conf.split(';');

if (!key || !url) {
const logger = getLogger();
LoggerProxy.init(logger);
logger.warn('Diagnostics backend config is invalid');
this.logger.warn('Diagnostics backend config is invalid');
return;
}

@ -6,7 +6,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories';
import { WorkflowHistoryRepository } from '@db/repositories/workflowHistory.repository';
import { Service } from 'typedi';
import { isWorkflowHistoryEnabled } from './workflowHistoryHelper.ee';
import { getLogger } from '@/Logger';
import { Logger } from '@/Logger';

export class SharedWorkflowNotFoundError extends Error {}
export class HistoryVersionNotFoundError extends Error {}

@ -14,6 +14,7 @@ export class HistoryVersionNotFoundError extends Error {}
@Service()
export class WorkflowHistoryService {
constructor(
private readonly logger: Logger,
private readonly workflowHistoryRepository: WorkflowHistoryRepository,
private readonly sharedWorkflowRepository: SharedWorkflowRepository,
) {}

@ -76,7 +77,7 @@ export class WorkflowHistoryService {
workflowId,
});
} catch (e) {
getLogger().error(
this.logger.error(
`Failed to save workflow history version for workflow ${workflowId}`,
e as Error,
);

@ -11,7 +11,6 @@ import { isSharingEnabled, rightDiff } from '@/UserManagement/UserManagementHelp
import { EEWorkflowsService as EEWorkflows } from './workflows.services.ee';
import { ExternalHooks } from '@/ExternalHooks';
import { SharedWorkflow } from '@db/entities/SharedWorkflow';
import { LoggerProxy } from 'n8n-workflow';
import { CredentialsService } from '../credentials/credentials.service';
import type { IExecutionPushResponse } from '@/Interfaces';
import * as GenericHelpers from '@/GenericHelpers';

@ -22,6 +21,7 @@ import { RoleService } from '@/services/role.service';
import * as utils from '@/utils';
import { listQueryMiddleware } from '@/middlewares';
import { TagService } from '@/services/tag.service';
import { Logger } from '@/Logger';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';

// eslint-disable-next-line @typescript-eslint/naming-convention

@ -181,7 +181,7 @@ EEWorkflowController.post(
});

if (!savedWorkflow) {
LoggerProxy.error('Failed to create workflow', { userId: req.user.id });
Container.get(Logger).error('Failed to create workflow', { userId: req.user.id });
throw new ResponseHelper.InternalServerError(
'An error occurred while saving your workflow. Please try again.',
);

@ -1,6 +1,5 @@
import express from 'express';
import { v4 as uuid } from 'uuid';
import { LoggerProxy } from 'n8n-workflow';

import axios from 'axios';
import * as Db from '@/Db';

@ -13,7 +12,6 @@ import { SharedWorkflow } from '@db/entities/SharedWorkflow';
import { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { validateEntity } from '@/GenericHelpers';
import { ExternalHooks } from '@/ExternalHooks';
import { getLogger } from '@/Logger';
import type { ListQuery, WorkflowRequest } from '@/requests';
import { isBelowOnboardingThreshold } from '@/WorkflowHelpers';
import { EEWorkflowController } from './workflows.controller.ee';

@ -27,21 +25,9 @@ import * as utils from '@/utils';
import { listQueryMiddleware } from '@/middlewares';
import { TagService } from '@/services/tag.service';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';
import { Logger } from '@/Logger';

export const workflowsController = express.Router();

/**
* Initialize Logger if needed
*/
workflowsController.use((req, res, next) => {
try {
LoggerProxy.getInstance();
} catch (error) {
LoggerProxy.init(getLogger());
}
next();
});

workflowsController.use('/', EEWorkflowController);

/**

@ -96,7 +82,7 @@ workflowsController.post(
});

if (!savedWorkflow) {
LoggerProxy.error('Failed to create workflow', { userId: req.user.id });
Container.get(Logger).error('Failed to create workflow', { userId: req.user.id });
throw new ResponseHelper.InternalServerError('Failed to save workflow');
}

@ -230,7 +216,7 @@ workflowsController.get(
});

if (!shared) {
LoggerProxy.verbose('User attempted to access a workflow without permissions', {
Container.get(Logger).verbose('User attempted to access a workflow without permissions', {
workflowId,
userId: req.user.id,
});

@ -280,7 +266,7 @@ workflowsController.delete(

const workflow = await WorkflowsService.delete(req.user, workflowId);
if (!workflow) {
LoggerProxy.verbose('User attempted to delete a workflow without permissions', {
Container.get(Logger).verbose('User attempted to delete a workflow without permissions', {
workflowId,
userId: req.user.id,
});

@ -1,11 +1,6 @@
import { Container } from 'typedi';
import type { IDataObject, INode, IPinData } from 'n8n-workflow';
import {
NodeApiError,
ErrorReporterProxy as ErrorReporter,
LoggerProxy,
Workflow,
} from 'n8n-workflow';
import { NodeApiError, ErrorReporterProxy as ErrorReporter, Workflow } from 'n8n-workflow';
import type { FindManyOptions, FindOptionsSelect, FindOptionsWhere, UpdateResult } from 'typeorm';
import { In, Like } from 'typeorm';
import pick from 'lodash/pick';

@ -35,6 +30,7 @@ import { OwnershipService } from '@/services/ownership.service';
import { isStringArray, isWorkflowIdValid } from '@/utils';
import { WorkflowHistoryService } from './workflowHistory/workflowHistory.service.ee';
import { BinaryDataService } from 'n8n-core';
import { Logger } from '@/Logger';

export class WorkflowsService {
static async getSharing(

@ -200,8 +196,9 @@ export class WorkflowsService {
}),
});

const logger = Container.get(Logger);
if (!shared) {
LoggerProxy.verbose('User attempted to update a workflow without permissions', {
logger.verbose('User attempted to update a workflow without permissions', {
workflowId,
userId: user.id,
});

@ -237,7 +234,7 @@ export class WorkflowsService {
} else {
// Update the workflow's version
workflow.versionId = uuid();
LoggerProxy.verbose(
logger.verbose(
`Updating versionId for workflow ${workflowId} for user ${user.id} after saving`,
{
previousVersionId: shared.workflow.versionId,

@ -514,7 +511,7 @@ export class WorkflowsService {
workflow.staticData.__dataChanged = false;
} catch (error) {
ErrorReporter.error(error);
LoggerProxy.error(
Container.get(Logger).error(
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: "${error.message}"`,
{ workflowId: workflow.id },

@ -5,7 +5,7 @@ import * as utils from '../shared/utils/';
import type { ExternalSecretsSettings, SecretsProviderState } from '@/Interfaces';
import { Cipher } from 'n8n-core';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import Container from 'typedi';
import { Container } from 'typedi';
import { ExternalSecretsProviders } from '@/ExternalSecrets/ExternalSecretsProviders.ee';
import {
DummyProvider,

@ -17,6 +17,7 @@ import config from '@/config';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { CREDENTIAL_BLANKING_VALUE } from '@/constants';
import { jsonParse, type IDataObject } from 'n8n-workflow';
import { mock } from 'jest-mock-extended';

let authOwnerAgent: SuperAgentTest;
let authMemberAgent: SuperAgentTest;

@ -52,6 +53,7 @@ const resetManager = async () => {
Container.set(
ExternalSecretsManager,
new ExternalSecretsManager(
mock(),
Container.get(SettingsRepository),
licenseLike,
mockProvidersInstance,

@ -7,11 +7,6 @@ import { getRiskSection } from './utils';
import * as testDb from '../shared/testDb';
import { generateNanoId } from '@db/utils/generators';

import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

LoggerProxy.init(getLogger());

beforeAll(async () => {
await testDb.init();
});

@ -10,11 +10,6 @@ import { getRiskSection, saveManualTriggerWorkflow } from './utils';
import * as testDb from '../shared/testDb';
import { generateNanoId } from '@db/utils/generators';

import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

LoggerProxy.init(getLogger());

beforeAll(async () => {
await testDb.init();
});

@ -5,11 +5,6 @@ import { FILESYSTEM_INTERACTION_NODE_TYPES, FILESYSTEM_REPORT } from '@/audit/co
import { getRiskSection, saveManualTriggerWorkflow } from './utils';
import * as testDb from '../shared/testDb';

import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

LoggerProxy.init(getLogger());

beforeAll(async () => {
await testDb.init();
});

@ -14,11 +14,6 @@ import { toReportTitle } from '@/audit/utils';
import config from '@/config';
import { generateNanoId } from '@db/utils/generators';

import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

LoggerProxy.init(getLogger());

beforeAll(async () => {
await testDb.init();

@ -1,4 +1,5 @@
import { v4 as uuid } from 'uuid';
import { Container } from 'typedi';
import * as Db from '@/Db';
import { audit } from '@/audit';
import { OFFICIAL_RISKY_NODE_TYPES, NODES_REPORT } from '@/audit/constants';

@ -9,12 +10,6 @@ import { mockInstance } from '../shared/utils/';
import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { NodeTypes } from '@/NodeTypes';
import { CommunityPackagesService } from '@/services/communityPackages.service';
import Container from 'typedi';

import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

LoggerProxy.init(getLogger());

const nodesAndCredentials = mockInstance(LoadNodesAndCredentials);
nodesAndCredentials.getCustomDirectories.mockReturnValue([]);

@ -1,6 +1,4 @@
import * as Config from '@oclif/config';
import { mock } from 'jest-mock-extended';
import { type ILogger, LoggerProxy } from 'n8n-workflow';

import { InternalHooks } from '@/InternalHooks';
import { ImportWorkflowsCommand } from '@/commands/import/workflow';

@ -8,8 +6,6 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import * as testDb from '../shared/testDb';
import { mockInstance } from '../shared/utils/';

LoggerProxy.init(mock<ILogger>());

beforeAll(async () => {
mockInstance(InternalHooks);
mockInstance(LoadNodesAndCredentials);

@ -2,9 +2,7 @@ import { mockInstance } from '../shared/utils/';
import { Worker } from '@/commands/worker';
import * as Config from '@oclif/config';
import config from '@/config';
import { LoggerProxy } from 'n8n-workflow';
import { Telemetry } from '@/telemetry';
import { getLogger } from '@/Logger';
import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee';
import { BinaryDataService } from 'n8n-core';
import { CacheService } from '@/services/cache.service';

@ -23,7 +21,6 @@ import { OrchestrationWorkerService } from '@/services/orchestration/worker/orch
const oclifConfig: Config.IConfig = new Config.Config({ root: __dirname });

beforeAll(async () => {
LoggerProxy.init(getLogger());
config.set('executions.mode', 'queue');
config.set('binaryDataManager.availableModes', 'filesystem');
mockInstance(Telemetry);

@ -1,8 +1,7 @@
import type { SuperAgentTest } from 'supertest';
import type { Entry as LdapUser } from 'ldapts';
import { Not } from 'typeorm';
import { type ILogger, jsonParse, LoggerProxy } from 'n8n-workflow';
import { mock } from 'jest-mock-extended';
import { jsonParse } from 'n8n-workflow';

import config from '@/config';
import * as Db from '@/Db';

@ -27,8 +26,6 @@ let globalMemberRole: Role;
let owner: User;
let authOwnerAgent: SuperAgentTest;

LoggerProxy.init(mock<ILogger>());

const defaultLdapConfig = {
...LDAP_DEFAULT_CONFIGURATION,
loginEnabled: true,

@ -2,8 +2,8 @@ import { setupTestServer } from './shared/utils';
import config from '@/config';
import request from 'supertest';
import Container from 'typedi';
import { MetricsService } from '../../src/services/metrics.service';
import { N8N_VERSION } from '../../src/constants';
import { MetricsService } from '@/services/metrics.service';
import { N8N_VERSION } from '@/constants';
import { parse as semverParse } from 'semver';

jest.unmock('@/eventbus/MessageEventBus/MessageEventBus');

@ -5,8 +5,6 @@ import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner';
import { randomApiKey } from '../shared/random';
import * as utils from '../shared/utils/';
import * as testDb from '../shared/testDb';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';

let owner: User;
let user1: User;

@ -16,8 +14,6 @@ let authUser1Agent: SuperAgentTest;
let authUser2Agent: SuperAgentTest;
let workflowRunner: ActiveWorkflowRunner;

LoggerProxy.init(getLogger());

const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] });

beforeAll(async () => {

@ -3,11 +3,9 @@ import * as Db from '@/Db';

import * as testDb from '../shared/testDb';
import type { ExecutionStatus } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { getLogger } from '@/Logger';
import type { ExecutionRepository } from '../../../src/databases/repositories';
import type { ExecutionEntity } from '../../../src/databases/entities/ExecutionEntity';
import { TIME } from '../../../src/constants';
import type { ExecutionRepository } from '@/databases/repositories';
import type { ExecutionEntity } from '@/databases/entities/ExecutionEntity';
import { TIME } from '@/constants';

describe('softDeleteOnPruningCycle()', () => {
const now = new Date();

@ -16,7 +14,6 @@ describe('softDeleteOnPruningCycle()', () => {
let workflow: Awaited<ReturnType<typeof testDb.createWorkflow>>;

beforeAll(async () => {
LoggerProxy.init(getLogger());
await testDb.init();

const { Execution } = Db.collections;

@ -1,7 +1,6 @@
import { Container } from 'typedi';
import cookieParser from 'cookie-parser';
import express from 'express';
import { LoggerProxy } from 'n8n-workflow';
import type superagent from 'superagent';
import request from 'supertest';
import { URL } from 'url';

@ -13,7 +12,6 @@ import { workflowsController } from '@/workflows/workflows.controller';
import { AUTH_COOKIE_NAME } from '@/constants';
import { credentialsController } from '@/credentials/credentials.controller';
import type { User } from '@db/entities/User';
import { getLogger } from '@/Logger';
import { loadPublicApiVersions } from '@/PublicApi/';
import { issueJWT } from '@/auth/jwt';
import { UserManagementMailer } from '@/UserManagement/email/UserManagementMailer';

@ -61,6 +59,7 @@ import { UserService } from '@/services/user.service';
import { executionsController } from '@/executions/executions.controller';
import { WorkflowHistoryController } from '@/workflows/workflowHistory/workflowHistory.controller.ee';
import { BinaryDataController } from '@/controllers/binaryData.controller';
import { Logger } from '@/Logger';

/**
* Plugin to prefix a path segment into a request URL pathname.

@ -137,8 +136,10 @@ export const setupTestServer = ({
app.use(rawBodyReader);
app.use(cookieParser());

const logger = getLogger();
LoggerProxy.init(logger);
// Mock all telemetry and logging
const logger = mockInstance(Logger);
mockInstance(InternalHooks);
mockInstance(PostHogClient);

const testServer: TestServer = {
app,

@ -151,10 +152,6 @@ export const setupTestServer = ({
beforeAll(async () => {
await testDb.init();

// Mock all telemetry.
mockInstance(InternalHooks);
mockInstance(PostHogClient);

config.set('userManagement.jwtSecret', 'My JWT secret');
config.set('userManagement.isInstanceOwnerSetUp', true);

@ -213,11 +210,7 @@ export const setupTestServer = ({
registerController(app, config, new EventBusControllerEE());
break;
case 'auth':
registerController(
app,
config,
new AuthController(config, logger, internalHooks, mfaService, userService),
);
registerController(app, config, Container.get(AuthController));
break;
case 'mfa':
registerController(app, config, new MFAController(mfaService));

@ -240,11 +233,7 @@ export const setupTestServer = ({
);
registerController(app, config, Container.get(CommunityPackagesController));
case 'me':
registerController(
app,
config,
new MeController(logger, externalHooks, internalHooks, userService),
);
registerController(app, config, Container.get(MeController));
break;
case 'passwordReset':
registerController(

@ -2,12 +2,10 @@ import { readFileSync } from 'fs';
import type { SuperAgentTest } from 'supertest';
import { agent as testAgent } from 'supertest';
import type { INodeType, INodeTypeDescription, IWebhookFunctions } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';

import { AbstractServer } from '@/AbstractServer';
import { ExternalHooks } from '@/ExternalHooks';
import { InternalHooks } from '@/InternalHooks';
import { getLogger } from '@/Logger';
import { NodeTypes } from '@/NodeTypes';
import { Push } from '@/push';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';

@ -19,7 +17,6 @@ describe('Webhook API', () => {
mockInstance(ExternalHooks);
mockInstance(InternalHooks);
mockInstance(Push);
LoggerProxy.init(getLogger());

let agent: SuperAgentTest;

@ -6,6 +6,7 @@ import type { IExecuteResponsePromiseData, IRun } from 'n8n-workflow';
import { createDeferredPromise } from 'n8n-workflow';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces';
import { ExecutionRepository } from '@db/repositories';
import { mock } from 'jest-mock-extended';

const FAKE_EXECUTION_ID = '15';
const FAKE_SECOND_EXECUTION_ID = '20';

@ -24,7 +25,7 @@ describe('ActiveExecutions', () => {
let activeExecutions: ActiveExecutions;

beforeEach(() => {
activeExecutions = new ActiveExecutions();
activeExecutions = new ActiveExecutions(mock());
});

afterEach(() => {

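On the test side, services that now take the Logger through their constructor get a mock passed in, as the ActiveExecutions spec above does with mock() from jest-mock-extended. A minimal sketch of that testing pattern, assuming the hypothetical ExampleService from earlier (the import path and assertion are illustrative, not part of the diff):

import { mock } from 'jest-mock-extended';
import { Logger } from '@/Logger';
import { ExampleService } from '@/services/example.service';

describe('ExampleService', () => {
	it('logs while doing work', () => {
		// A deep mock stands in for the injected Logger, so no LoggerProxy.init() is needed.
		const logger = mock<Logger>();
		const service = new ExampleService(logger);

		service.doWork();

		expect(logger.debug).toHaveBeenCalled();
	});
});
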
Some files were not shown because too many files have changed in this diff.