Merge pull request #3883 from louislam/1.23.X-merge-to-2.X.X

Merge 1.23.4 changes to 2.0.0
Louis Lam 2023-11-13 21:26:40 +08:00 committed by GitHub
commit 014231ef86
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 1555 additions and 3276 deletions


@@ -493,8 +493,11 @@ ALTER TABLE monitor
    await knex.schema.table("monitor", function (table) {
        table.string("kafka_producer_topic", 255);
        table.text("kafka_producer_brokers");
-       table.integer("kafka_producer_ssl");
-       table.string("kafka_producer_allow_auto_topic_creation", 255);
+       // patch-fix-kafka-producer-booleans.sql
+       table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
+       table.boolean("kafka_producer_allow_auto_topic_creation").defaultTo(0).notNullable();
        table.text("kafka_producer_sasl_options");
        table.text("kafka_producer_message");
    });
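For context, a standalone knex migration that introduces boolean columns like the ones above generally follows this shape. This is an illustrative sketch only, not the project's actual migration file; the up/down split and the migration file layout are assumptions.

// Hypothetical knex migration sketch (not part of this PR).
exports.up = function (knex) {
    return knex.schema.table("monitor", (table) => {
        table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
        table.boolean("kafka_producer_allow_auto_topic_creation").defaultTo(0).notNullable();
    });
};

exports.down = function (knex) {
    return knex.schema.table("monitor", (table) => {
        table.dropColumn("kafka_producer_ssl");
        table.dropColumn("kafka_producer_allow_auto_topic_creation");
    });
};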


@@ -0,0 +1,32 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
-- Rename COLUMNs to another one (suffixed by `_old`)
ALTER TABLE monitor
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
ALTER TABLE monitor
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
-- Add correct COLUMNs
ALTER TABLE monitor
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
ALTER TABLE monitor
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
-- Set bring old values from `_old` COLUMNs to correct ones
UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
-- Remove old COLUMNs
ALTER TABLE monitor
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
ALTER TABLE monitor
DROP COLUMN kafka_producer_ssl_old;
COMMIT;

package-lock.json (generated): 4539 changed lines
File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
{
    "name": "uptime-kuma",
-   "version": "1.23.3",
+   "version": "1.23.4",
    "license": "MIT",
    "repository": {
        "type": "git",
@@ -41,7 +41,7 @@
        "build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
        "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
        "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
-       "setup": "git checkout 1.23.3 && npm ci --production && npm run download-dist",
+       "setup": "git checkout 1.23.4 && npm ci --production && npm run download-dist",
        "download-dist": "node extra/download-dist.js",
        "mark-as-nightly": "node extra/mark-as-nightly.js",
        "reset-password": "node extra/reset-password.js",
@@ -49,6 +49,8 @@
        "simple-dns-server": "node extra/simple-dns-server.js",
        "simple-mqtt-server": "node extra/simple-mqtt-server.js",
        "simple-mongo": "docker run --rm -p 27017:27017 mongo",
+       "simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
+       "simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
        "update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
        "release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
        "release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
@@ -108,7 +110,7 @@
        "mongodb": "~4.17.1",
        "mqtt": "~4.3.7",
        "mssql": "~8.1.4",
-       "mysql2": "~2.3.3",
+       "mysql2": "~3.6.2",
        "nanoid": "~3.3.4",
        "node-cloudflared-tunnel": "~1.0.9",
        "node-radius-client": "~1.0.0",
@@ -117,8 +119,8 @@
        "notp": "~2.0.3",
        "openid-client": "^5.4.2",
        "password-hash": "~1.2.2",
-       "pg": "~8.8.0",
-       "pg-connection-string": "~2.5.0",
+       "pg": "~8.11.3",
+       "pg-connection-string": "~2.6.2",
        "playwright-core": "~1.35.1",
        "prom-client": "~13.2.0",
        "prometheus-api-metrics": "~3.2.1",


@@ -103,7 +103,8 @@ class Database {
        "patch-monitor-oauth-cc.sql": true,
        "patch-add-timeout-monitor.sql": true,
        "patch-add-gamedig-given-port.sql": true,
-       "patch-notification-config.sql": true, // The last file so far converted to a knex migration file
+       "patch-notification-config.sql": true,
+       "patch-fix-kafka-producer-booleans.sql": true, // The last file so far converted to a knex migration file
    };

    /**


@@ -3,10 +3,10 @@ const dayjs = require("dayjs");
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
-   SQL_DATETIME_FORMAT
+   SQL_DATETIME_FORMAT, isDev, sleep, getRandomInt
} = require("../../src/util");
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
-   redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials, axiosAbortSignal
+   redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
} = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
@@ -23,6 +23,8 @@ const jsonata = require("jsonata");
const jwt = require("jsonwebtoken");
const { UptimeCalculator } = require("../uptime-calculator");

+const rootCertificates = rootCertificatesFingerprints();
+
/**
 * status:
 *      0 = DOWN
@@ -146,8 +148,8 @@ class Monitor extends BeanModel {
            expectedValue: this.expectedValue,
            kafkaProducerTopic: this.kafkaProducerTopic,
            kafkaProducerBrokers: JSON.parse(this.kafkaProducerBrokers),
-           kafkaProducerSsl: this.kafkaProducerSsl === "1" && true || false,
-           kafkaProducerAllowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation === "1" && true || false,
+           kafkaProducerSsl: this.getKafkaProducerSsl(),
+           kafkaProducerAllowAutoTopicCreation: this.getKafkaProducerAllowAutoTopicCreation(),
            kafkaProducerMessage: this.kafkaProducerMessage,
            screenshot,
        };
@@ -298,6 +300,22 @@ class Monitor extends BeanModel {
        return Boolean(this.gamedigGivenPortOnly);
    }

+   /**
+    * Parse to boolean
+    * @returns {boolean} Kafka Producer Ssl enabled?
+    */
+   getKafkaProducerSsl() {
+       return Boolean(this.kafkaProducerSsl);
+   }
+
+   /**
+    * Parse to boolean
+    * @returns {boolean} Kafka Producer Allow Auto Topic Creation Enabled?
+    */
+   getKafkaProducerAllowAutoTopicCreation() {
+       return Boolean(this.kafkaProducerAllowAutoTopicCreation);
+   }
+
    /**
     * Start monitor
     * @param {Server} io Socket server instance
@@ -324,6 +342,16 @@ class Monitor extends BeanModel {
                }
            }

+           // Evil
+           if (isDev) {
+               if (process.env.EVIL_RANDOM_MONITOR_SLEEP === "SURE") {
+                   if (getRandomInt(0, 100) === 0) {
+                       log.debug("evil", `[${this.name}] Evil mode: Random sleep: ` + beatInterval * 10000);
+                       await sleep(beatInterval * 10000);
+                   }
+               }
+           }
+
            // Expose here for prometheus update
            // undefined if not https
            let tlsInfo = undefined;
@@ -346,6 +374,12 @@ class Monitor extends BeanModel {
                bean.status = flipStatus(bean.status);
            }

+           // Runtime patch timeout if it is 0
+           // See https://github.com/louislam/uptime-kuma/pull/3961#issuecomment-1804149144
+           if (this.timeout <= 0) {
+               this.timeout = this.interval * 1000 * 0.8;
+           }
+
            try {
                if (await Monitor.isUnderMaintenance(this.id)) {
                    bean.msg = "Monitor under maintenance";
@@ -728,7 +762,7 @@
            } else if (this.type === "sqlserver") {
                let startTime = dayjs().valueOf();

-               await mssqlQuery(this.databaseConnectionString, this.databaseQuery);
+               await mssqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
                bean.msg = "";
                bean.status = UP;
@@ -767,7 +801,7 @@
            } else if (this.type === "postgres") {
                let startTime = dayjs().valueOf();

-               await postgresQuery(this.databaseConnectionString, this.databaseQuery);
+               await postgresQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
                bean.msg = "";
                bean.status = UP;
@@ -775,7 +809,11 @@
            } else if (this.type === "mysql") {
                let startTime = dayjs().valueOf();

-               bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
+               // Use `radius_password` as `password` field, since there are too many unnecessary fields
+               // TODO: rename `radius_password` to `password` later for general use
+               let mysqlPassword = this.radiusPassword;
+
+               bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1", mysqlPassword);
                bean.status = UP;
                bean.ping = dayjs().valueOf() - startTime;
            } else if (this.type === "mongodb") {
@@ -959,6 +997,7 @@
                log.debug("monitor", `[${this.name}] Next heartbeat in: ${intervalRemainingMs}ms`);
                this.heartbeatInterval = setTimeout(safeBeat, intervalRemainingMs);
+               this.lastScheduleBeatTime = dayjs();
            } else {
                log.info("monitor", `[${this.name}] isStop = true, no next check.`);
            }
@@ -971,7 +1010,9 @@
         */
        const safeBeat = async () => {
            try {
+               this.lastStartBeatTime = dayjs();
                await beat();
+               this.lastEndBeatTime = dayjs();
            } catch (e) {
                console.trace(e);
                UptimeKumaServer.errorLog(e, false);
@@ -980,6 +1021,9 @@
                if (! this.isStop) {
                    log.info("monitor", "Try to restart the monitor");
                    this.heartbeatInterval = setTimeout(safeBeat, this.interval * 1000);
+                   this.lastScheduleBeatTime = dayjs();
+               } else {
+                   log.info("monitor", "isStop = true, no next check.");
                }
            }
        };
@@ -1320,7 +1364,10 @@
        let certInfo = tlsInfoObject.certInfo;
        while (certInfo) {
            let subjectCN = certInfo.subject["CN"];
-           if (certInfo.daysRemaining > targetDays) {
+           if (rootCertificates.has(certInfo.fingerprint256)) {
+               log.debug("monitor", `Known root cert: ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
+               break;
+           } else if (certInfo.daysRemaining > targetDays) {
                log.debug("monitor", `No need to send cert notification for ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
            } else {
                log.debug("monitor", `call sendCertNotificationByTargetDays for ${targetDays} deadline on certificate ${subjectCN}.`);


@@ -50,7 +50,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
        let pushToken = request.params.pushToken;
        let msg = request.query.msg || "OK";
-       let ping = parseInt(request.query.ping) || null;
+       let ping = parseFloat(request.query.ping) || null;
        let statusString = request.query.status || "up";
        let status = (statusString === "up") ? UP : DOWN;
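For reference, a minimal sketch of how this push endpoint could be exercised after the change; the base URL and token are placeholders, and the JSON response handling is an assumption:

// Hypothetical push call; a fractional ping such as 12.7 is now kept instead of being truncated to an integer.
const pushToken = "REPLACE_WITH_YOUR_PUSH_TOKEN";
const url = `http://localhost:3001/api/push/${pushToken}?status=up&msg=OK&ping=12.7`;

fetch(url)
    .then((res) => res.json())
    .then((body) => console.log(body))
    .catch((err) => console.error(err));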


@ -823,6 +823,9 @@ let needSetup = false;
bean.kafkaProducerAllowAutoTopicCreation = monitor.kafkaProducerAllowAutoTopicCreation; bean.kafkaProducerAllowAutoTopicCreation = monitor.kafkaProducerAllowAutoTopicCreation;
bean.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions); bean.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions);
bean.kafkaProducerMessage = monitor.kafkaProducerMessage; bean.kafkaProducerMessage = monitor.kafkaProducerMessage;
bean.kafkaProducerSsl = monitor.kafkaProducerSsl;
bean.kafkaProducerAllowAutoTopicCreation =
monitor.kafkaProducerAllowAutoTopicCreation;
bean.gamedigGivenPortOnly = monitor.gamedigGivenPortOnly; bean.gamedigGivenPortOnly = monitor.gamedigGivenPortOnly;
bean.validate(); bean.validate();
@@ -1793,8 +1796,10 @@ gracefulShutdown(server.httpServer, {
});

// Catch unexpected errors here
-process.addListener("unhandledRejection", (error, promise) => {
+let unexpectedErrorHandler = (error, promise) => {
    console.trace(error);
    UptimeKumaServer.errorLog(error, false);
    console.error("If you keep encountering errors, please report to https://github.com/louislam/uptime-kuma/issues");
-});
+};
+process.addListener("unhandledRejection", unexpectedErrorHandler);
+process.addListener("uncaughtException", unexpectedErrorHandler);


@@ -62,6 +62,8 @@ class UptimeKumaServer {
     */
    jwtSecret = null;

+   checkMonitorsInterval = null;
+
    /**
     * Get the current instance of the server if it exists, otherwise
     * create a new instance.
@@ -87,7 +89,10 @@ class UptimeKumaServer {
        // Set axios default user-agent to Uptime-Kuma/version
        axios.defaults.headers.common["User-Agent"] = this.getUserAgent();

-       log.debug("server", "Creating express and socket.io instance");
+       // Set default axios timeout to 5 minutes instead of infinity
+       axios.defaults.timeout = 300 * 1000;
+
+       log.info("server", "Creating express and socket.io instance");
        this.app = express();
        if (sslKey && sslCert) {
            log.info("server", "Server Type: HTTPS");
@@ -371,6 +376,10 @@ class UptimeKumaServer {
        if (enable || enable === null) {
            this.startNSCDServices();
        }
+
+       this.checkMonitorsInterval = setInterval(() => {
+           this.checkMonitors();
+       }, 60 * 1000);
    }

    /**
@@ -383,6 +392,8 @@
        if (enable || enable === null) {
            this.stopNSCDServices();
        }
+
+       clearInterval(this.checkMonitorsInterval);
    }

    /**
@@ -416,6 +427,83 @@
        }
    }

+   /**
+    * Start the specified monitor
+    * @param {number} monitorID ID of monitor to start
+    * @returns {Promise<void>}
+    */
+   async startMonitor(monitorID) {
+       log.info("manage", `Resume Monitor: ${monitorID} by server`);
+
+       await R.exec("UPDATE monitor SET active = 1 WHERE id = ?", [
+           monitorID,
+       ]);
+
+       let monitor = await R.findOne("monitor", " id = ? ", [
+           monitorID,
+       ]);
+
+       if (monitor.id in this.monitorList) {
+           this.monitorList[monitor.id].stop();
+       }
+
+       this.monitorList[monitor.id] = monitor;
+       monitor.start(this.io);
+   }
+
+   /**
+    * Restart a given monitor
+    * @param {number} monitorID ID of monitor to start
+    * @returns {Promise<void>}
+    */
+   async restartMonitor(monitorID) {
+       return await this.startMonitor(monitorID);
+   }
+
+   /**
+    * Check if monitors are running properly
+    */
+   async checkMonitors() {
+       log.debug("monitor_checker", "Checking monitors");
+
+       for (let monitorID in this.monitorList) {
+           let monitor = this.monitorList[monitorID];
+
+           // Not for push monitor
+           if (monitor.type === "push") {
+               continue;
+           }
+
+           if (!monitor.active) {
+               continue;
+           }
+
+           // Check the lastStartBeatTime, if it is too long, then restart
+           if (monitor.lastScheduleBeatTime) {
+               let diff = dayjs().diff(monitor.lastStartBeatTime, "second");
+
+               if (diff > monitor.interval * 1.5) {
+                   log.error("monitor_checker", `Monitor Interval: ${monitor.interval} Monitor ` + monitorID + " lastStartBeatTime diff: " + diff);
+                   log.error("monitor_checker", "Unexpected error: Monitor " + monitorID + " is struck for unknown reason");
+                   log.error("monitor_checker", "Last start beat time: " + R.isoDateTime(monitor.lastStartBeatTime));
+                   log.error("monitor_checker", "Last end beat time: " + R.isoDateTime(monitor.lastEndBeatTime));
+                   log.error("monitor_checker", "Last ScheduleBeatTime: " + R.isoDateTime(monitor.lastScheduleBeatTime));
+
+                   // Restart
+                   log.error("monitor_checker", `Restarting monitor ${monitorID} automatically now`);
+                   this.restartMonitor(monitorID);
+               } else {
+                   //log.debug("monitor_checker", "Monitor " + monitorID + " is running normally");
+               }
+           } else {
+               //log.debug("monitor_checker", "Monitor " + monitorID + " is not started yet, skipp");
+           }
+       }
+
+       log.debug("monitor_checker", "Checking monitors end");
+   }
+
    /**
     * Default User-Agent when making HTTP requests
     * @returns {string} User-Agent


@@ -22,6 +22,7 @@ const protojs = require("protobufjs");
const radiusClient = require("node-radius-client");
const redis = require("redis");
const oidc = require("openid-client");
+const tls = require("tls");

const {
    dictionaries: {
@@ -397,6 +398,9 @@ exports.mssqlQuery = async function (connectionString, query) {
    try {
        pool = new mssql.ConnectionPool(connectionString);
        await pool.connect();
+       if (!query) {
+           query = "SELECT 1";
+       }
        await pool.request().query(query);
        pool.close();
    } catch (e) {
@@ -418,12 +422,22 @@ exports.postgresQuery = function (connectionString, query) {
    return new Promise((resolve, reject) => {
        const config = postgresConParse(connectionString);

-       if (config.password === "") {
-           // See https://github.com/brianc/node-postgres/issues/1927
-           return reject(new Error("Password is undefined."));
-       }
-       const client = new Client({ connectionString });
+       // Fix #3868, which true/false is not parsed to boolean
+       if (typeof config.ssl === "string") {
+           config.ssl = config.ssl === "true";
+       }
+
+       if (config.password === "") {
+           // See https://github.com/brianc/node-postgres/issues/1927
+           reject(new Error("Password is undefined."));
+           return;
+       }
+
+       const client = new Client(config);
+
+       client.on("error", (error) => {
+           log.debug("postgres", "Error caught in the error event handler.");
+           reject(error);
+       });

        client.connect((err) => {
            if (err) {
@@ -458,11 +472,15 @@ exports.postgresQuery = function (connectionString, query) {
 * Run a query on MySQL/MariaDB
 * @param {string} connectionString The database connection string
 * @param {string} query The query to validate the database with
+ * @param {?string} password The password to use
 * @returns {Promise<(string)>} Response from server
 */
-exports.mysqlQuery = function (connectionString, query) {
+exports.mysqlQuery = function (connectionString, query, password = undefined) {
    return new Promise((resolve, reject) => {
-       const connection = mysql.createConnection(connectionString);
+       const connection = mysql.createConnection({
+           uri: connectionString,
+           password
+       });

        connection.on("error", (err) => {
            reject(err);
@@ -1060,6 +1078,30 @@ module.exports.grpcQuery = async (options) => {
    });
};

+/**
+ * Returns an array of SHA256 fingerprints for all known root certificates.
+ * @returns {Set} A set of SHA256 fingerprints.
+ */
+module.exports.rootCertificatesFingerprints = () => {
+    let fingerprints = tls.rootCertificates.map(cert => {
+        let certLines = cert.split("\n");
+        certLines.shift();
+        certLines.pop();
+        let certBody = certLines.join("");
+        let buf = Buffer.from(certBody, "base64");
+
+        const shasum = crypto.createHash("sha256");
+        shasum.update(buf);
+
+        return shasum.digest("hex").toUpperCase().replace(/(.{2})(?!$)/g, "$1:");
+    });
+
+    fingerprints.push("6D:99:FB:26:5E:B1:C5:B3:74:47:65:FC:BC:64:8F:3C:D8:E1:BF:FA:FD:C4:C2:F9:9B:9D:47:CF:7F:F1:C2:4F"); // ISRG X1 cross-signed with DST X3
+    fingerprints.push("8B:05:B6:8C:C6:59:E5:ED:0F:CB:38:F2:C9:42:FB:FD:20:0E:6F:2F:F9:F8:5D:63:C6:99:4E:F5:E0:B0:27:01"); // ISRG X2 cross-signed with ISRG X1
+
+    return new Set(fingerprints);
+};
+
module.exports.SHAKE256_LENGTH = 16;

/**
@@ -1103,13 +1145,17 @@ if (process.env.TEST_BACKEND) {
 */
module.exports.axiosAbortSignal = (timeoutMs) => {
    try {
+       // Just in case, as 0 timeout here will cause the request to be aborted immediately
+       if (!timeoutMs || timeoutMs <= 0) {
+           timeoutMs = 5000;
+       }
        return AbortSignal.timeout(timeoutMs);
    } catch (_) {
        // v16-: AbortSignal.timeout is not supported
        try {
            const abortController = new AbortController();
-           setTimeout(() => abortController.abort(), timeoutMs || 0);
+           setTimeout(() => abortController.abort(), timeoutMs);
            return abortController.signal;
        } catch (_) {
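A minimal usage sketch of the helper above, assuming it is consumed the way the rest of the codebase passes abort signals to axios; the URL and require path are placeholders:

// Hypothetical consumer; with the patch, a 0 or missing timeout now falls back to 5000 ms
// instead of aborting the request immediately.
const axios = require("axios");
const { axiosAbortSignal } = require("./server/util-server");

async function checkHealth() {
    const res = await axios.get("https://example.com/health", {
        signal: axiosAbortSignal(0),
    });
    console.log(res.status);
}

checkHealth().catch(console.error);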


@@ -368,11 +368,20 @@
                        <input id="connectionString" v-model="monitor.databaseConnectionString" type="text" class="form-control" required>
                    </div>
                </template>

+               <template v-if="monitor.type === 'mysql'">
+                   <div class="my-3">
+                       <label for="mysql-password" class="form-label">{{ $t("Password") }}</label>
+                       <!-- TODO: Rename monitor.radiusPassword to monitor.password for general use -->
+                       <HiddenInput id="mysql-password" v-model="monitor.radiusPassword" autocomplete="false"></HiddenInput>
+                   </div>
+               </template>
+
                <!-- SQL Server / PostgreSQL / MySQL -->
                <template v-if="monitor.type === 'sqlserver' || monitor.type === 'postgres' || monitor.type === 'mysql'">
                    <div class="my-3">
                        <label for="sqlQuery" class="form-label">{{ $t("Query") }}</label>
-                       <textarea id="sqlQuery" v-model="monitor.databaseQuery" class="form-control" :placeholder="$t('Example:', [ 'select getdate()' ])" required></textarea>
+                       <textarea id="sqlQuery" v-model="monitor.databaseQuery" class="form-control" :placeholder="$t('Example:', [ 'SELECT 1' ])"></textarea>
                    </div>
                </template>
@@ -842,6 +851,7 @@ import TagsManager from "../components/TagsManager.vue";
import { genSecret, isDev, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND } from "../util.ts";
import { hostNameRegexPattern } from "../util-frontend";
import { sleep } from "../util";
+import HiddenInput from "../components/HiddenInput.vue";

const toast = useToast;
@@ -882,11 +892,13 @@ const monitorDefaults = {
        mechanism: "None",
    },
    kafkaProducerSsl: false,
+   kafkaProducerAllowAutoTopicCreation: false,
    gamedigGivenPortOnly: true,
};

export default {
    components: {
+       HiddenInput,
        ActionSelect,
        ProxyDialog,
        CopyableInput,