Mirror of https://github.com/louislam/uptime-kuma.git (synced 2024-11-09 23:24:07 -08:00)

Commit 6d22ebedca: Merge branch 'master' into add-JSDoc-comments
@@ -28,6 +28,8 @@ SECURITY.md
tsconfig.json
.env
/tmp
/babel.config.js
/ecosystem.config.js

### .gitignore content (commented rules are duplicated)

@@ -42,4 +44,6 @@ dist-ssr
#!/data/.gitkeep
#.vscode

### End of .gitignore content
57 .eslintrc.js

@@ -1,4 +1,9 @@
module.exports = {
ignorePatterns: [
"test/*",
"server/modules/apicache/*",
"src/util.js"
],
root: true,
env: {
browser: true,
@@ -17,39 +22,47 @@ module.exports = {
requireConfigFile: false,
},
rules: {
"linebreak-style": ["error", "unix"],
"camelcase": ["warn", {
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
"linebreak-style": [ "error", "unix" ],
"camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
// override/add rules settings here, such as:
// 'vue/no-unused-vars': 'error'
"no-unused-vars": "warn",
"no-unused-vars": [ "warn", {
"args": "none"
}],
indent: [
"error",
4,
{
ignoredNodes: ["TemplateLiteral"],
ignoredNodes: [ "TemplateLiteral" ],
SwitchCase: 1,
},
],
quotes: ["warn", "double"],
semi: "warn",
"vue/html-indent": ["warn", 4], // default: 2
quotes: [ "warn", "double" ],
semi: "error",
"vue/html-indent": [ "warn", 4 ], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"no-multi-spaces": ["error", {
"no-multi-spaces": [ "error", {
ignoreEOLComments: true,
}],
"space-before-function-paren": ["error", {
"array-bracket-spacing": [ "warn", "always", {
"singleValue": true,
"objectsInArrays": false,
"arraysInArrays": false
}],
"space-before-function-paren": [ "error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": ["error", "always"],
"object-curly-spacing": [ "error", "always" ],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
@@ -60,36 +73,36 @@ module.exports = {
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "warn",
"no-constant-condition": ["error", {
"no-constant-condition": [ "error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": ["warn", {
"no-multiple-empty-lines": [ "warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": ["warn", "always", {
"lines-between-class-members": [ "warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": ["error", "consistent"],
"eol-last": ["error", "always"],
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//'prefer-template': 'error',
"comma-dangle": ["warn", "only-multiline"],
"no-empty": ["error", {
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { "max": 1 }]
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }]
},
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": ["error", "always-multiline"],
"comma-dangle": [ "error", "always-multiline" ],
}
},
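These lint rules are wired into npm scripts elsewhere in this diff (see the package.json section); a typical local run, assuming dependencies are already installed, looks like:

```bash
# Check JavaScript/Vue and stylesheet sources (script names taken from package.json).
npm run lint:js
npm run lint:style

# Or let ESLint auto-fix what it can.
npm run lint-fix:js
```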
1 .github/workflows/auto-test.yml (vendored)

@@ -20,6 +20,7 @@ jobs:
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/

steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v2

- name: Use Node.js ${{ matrix.node-version }}
22 .github/workflows/stale-bot.yml (vendored)

@@ -1,22 +0,0 @@
name: 'Automatically close stale issues and PRs'
on:
schedule:
- cron: '0 0 * * *'
#Run once a day at midnight

jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.'
close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.'
days-before-stale: 180
days-before-close: 7
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,'
exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,'
exempt-issue-assignees: 'louislam'
exempt-pr-assignees: 'louislam'
@@ -1,9 +1,13 @@
{
"extends": "stylelint-config-standard",
"customSyntax": "postcss-html",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null
"declaration-empty-line-before": null,
"alpha-value-notation": "number",
"color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null
}
}
@@ -27,12 +27,25 @@ The frontend code build into "dist" directory. The server (express.js) exposes t

## Can I create a pull request for Uptime Kuma?

Generally, if the pull request is working fine, and it does not affect any existing logic, workflow and performance, I will merge it into the master branch once it is tested.
⚠️ 2022-03-02 Update:

If you are not sure whether I will accept your pull request, feel free to create an empty pull request draft first.
Since I found that merging pull requests is a pretty heavy task for me, I try to rearrange it.

✅ Accept:
- Bug/Security fix
- Translations
- Adding notification providers

❌ Avoid:
- Large pull requests
- New big features

My long story here: https://www.reddit.com/r/UptimeKuma/comments/t1t6or/comment/hynyijx/

### Recommended Pull Request Guideline

Before going deep into coding, discussion first is preferred. Creating an empty pull request for discussion is recommended.

1. Fork the project
1. Clone your fork repo to local
1. Create a new branch
@@ -42,42 +55,7 @@ If you are not sure whether I will accept your pull request, feel free to create
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
1. Write a proper description
1. Click "Change to draft"

### Pull Request Examples

Here are some example situations in the past.

#### ✅ High - Medium Priority

Easy to review, no breaking change and not touching the existing code

- Add a new notification
- Add a chart
- Fix a bug
- Translations
- Add an independent new feature

#### *️⃣ Requires one more reviewer

I do not have the knowledge to test it.

- Add k8s supports

#### ⚠ Low Priority - Harsh Mode

Some pull requests are required to modify the core. To be honest, I do not want anyone to try to do that, because it would cost a lot of your time. I will review your pull request harshly. Also, you may need to write a lot of unit tests to ensure that there is no breaking change.

- Touch large parts of code of any very important features
- Touch monitoring logic
- Drop a table or drop a column for any reason
- Touch the entry point of Docker or Node.js
- Modify auth

#### *️⃣ Low Priority

It changes my current workflow and requires further study.

- Change my release approach
1. Discussion

#### ❌ Won't Merge

@@ -221,14 +199,13 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
### Release Procedures

1. Draft a release note
1. Make sure the repo is cleared
1. `npm run update-version 1.X.X`
1. `npm run build`
1. `npm run build-docker`
1. `git push`
1. Publish the release note as 1.X.X
1. `npm run upload-artifacts`
1. SSH to demo site server and update to 1.X.X
2. Make sure the repo is cleared
3. `npm run release-final` with env vars `VERSION` and `GITHUB_TOKEN` (see the example after this list)
4. Wait until the `Press any key to continue`
5. `git push`
6. Publish the release note as 1.X.X
7. Press any key to continue
8. SSH to demo site server and update to 1.X.X
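For clarity, a minimal sketch of how these release scripts are driven from a shell; the version string and token below are placeholders, not real values:

```bash
# Placeholders only: substitute the actual release version and a valid GitHub token.
VERSION=1.15.0 GITHUB_TOKEN=xxxxxxxx npm run release-final

# The beta flow (see "Release Beta Procedures" below) is invoked the same way:
VERSION=1.15.0-beta.1 GITHUB_TOKEN=xxxxxxxx npm run release-beta
```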

Checking:

@@ -236,6 +213,15 @@ Checking:
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- Try clean installation with Node.js

### Release Beta Procedures

1. Draft a release note, check "This is a pre-release"
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. Publish the release note as 1.X.X-beta.X
6. Press any key to continue

### Release Wiki

#### Setup Repo
41 README.md

@@ -37,7 +37,6 @@ VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollec
### 🐳 Docker

```bash
docker volume create uptime-kuma
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```

@@ -47,7 +46,10 @@ Browse to http://localhost:3001 after starting.

### 💪🏻 Non-Docker

Required Tools: Node.js >= 14, git and pm2.
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running in the background

```bash
# Update your npm to the latest version
@@ -61,12 +63,26 @@ npm run setup
node server/server.js

# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it: npm install pm2 -g
pm2 start server/server.js --name uptime-kuma
```
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate

# Start Server
pm2 start server/server.js --name uptime-kuma

```
Browse to http://localhost:3001 after starting.

More useful PM2 Commands

```bash
# If you want to see the current console output
pm2 monit

# If you want to add it to startup
pm2 save && pm2 startup
```

### Advanced Installation

If you need more options or need to browse via a reverse proxy, please read:

@@ -93,7 +109,7 @@ https://github.com/louislam/uptime-kuma/projects/1

Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)

<img src="https://uptime.kuma.pet/sponsors?v=3" alt />
<img src="https://uptime.kuma.pet/sponsors?v=6" alt />

## 🖼 More Screenshots

@@ -115,7 +131,7 @@ Telegram Notification Sample:

## Motivation

* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and unmaintained.
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
@@ -138,10 +154,17 @@ https://www.reddit.com/r/UptimeKuma/

## Contribute

### Test Beta Version

Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases

### Bug Reports / Feature Requests
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).

### Translations
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages

If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.

English proofreading is needed too because my grammar is not that great, sadly. Feel free to correct my grammar in this README, source code, or wiki.
### Pull Requests
If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
@@ -1,11 +1,11 @@
const config = {};

if (process.env.TEST_FRONTEND) {
config.presets = ["@babel/preset-env"];
config.presets = [ "@babel/preset-env" ];
}

if (process.env.TEST_BACKEND) {
config.plugins = ["babel-plugin-rewire"];
config.plugins = [ "babel-plugin-rewire" ];
}

module.exports = config;
@@ -10,15 +10,15 @@ export default defineConfig({
plugins: [
vue(),
legacy({
targets: ["ie > 11"],
additionalLegacyPolyfills: ["regenerator-runtime/runtime"]
targets: [ "ie > 11" ],
additionalLegacyPolyfills: [ "regenerator-runtime/runtime" ]
})
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [postcssRTLCSS]
"plugins": [ postcssRTLCSS ]
}
},
});
16 db/patch-added-mqtt-monitor.sql (Normal file)

@@ -0,0 +1,16 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD mqtt_topic TEXT;

ALTER TABLE monitor
ADD mqtt_success_message VARCHAR(255);

ALTER TABLE monitor
ADD mqtt_username VARCHAR(255);

ALTER TABLE monitor
ADD mqtt_password VARCHAR(255);

COMMIT;

7 db/patch-monitor-expiry-notification.sql (Normal file)

@@ -0,0 +1,7 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD expiry_notification BOOLEAN default 1;

COMMIT;

23 db/patch-proxy.sql (Normal file)

@@ -0,0 +1,23 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

CREATE TABLE proxy (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INT NOT NULL,
protocol VARCHAR(10) NOT NULL,
host VARCHAR(255) NOT NULL,
port SMALLINT NOT NULL,
auth BOOLEAN NOT NULL,
username VARCHAR(255) NULL,
password VARCHAR(255) NULL,
active BOOLEAN NOT NULL DEFAULT 1,
'default' BOOLEAN NOT NULL DEFAULT 0,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);

ALTER TABLE monitor ADD COLUMN proxy_id INTEGER REFERENCES proxy(id);

CREATE INDEX proxy_id ON monitor (proxy_id);
CREATE INDEX proxy_user_id ON proxy (user_id);

COMMIT;

6 db/patch-status-page-footer-css.sql (Normal file)

@@ -0,0 +1,6 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;

31 db/patch-status-page.sql (Normal file)

@@ -0,0 +1,31 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

CREATE TABLE [status_page](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[slug] VARCHAR(255) NOT NULL UNIQUE,
[title] VARCHAR(255) NOT NULL,
[description] TEXT,
[icon] VARCHAR(255) NOT NULL,
[theme] VARCHAR(30) NOT NULL,
[published] BOOLEAN NOT NULL DEFAULT 1,
[search_engine_index] BOOLEAN NOT NULL DEFAULT 1,
[show_tags] BOOLEAN NOT NULL DEFAULT 0,
[password] VARCHAR,
[created_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
[modified_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE UNIQUE INDEX [slug] ON [status_page]([slug]);

CREATE TABLE [status_page_cname](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[status_page_id] INTEGER NOT NULL REFERENCES [status_page]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[domain] VARCHAR NOT NULL UNIQUE
);

ALTER TABLE incident ADD status_page_id INTEGER;
ALTER TABLE [group] ADD status_page_id INTEGER;

COMMIT;
@@ -1,8 +1,8 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:14-alpine3.12
FROM node:16-alpine3.12
WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
pip3 --no-cache-dir install apprise==0.9.6 && \
pip3 --no-cache-dir install apprise==0.9.8 && \
rm -rf /root/.cache

@@ -1,12 +1,26 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:14-buster-slim
FROM node:16-buster-slim
ARG TARGETPLATFORM

WORKDIR /app

# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise==0.9.6 && \
pip3 --no-cache-dir install apprise==0.9.8 && \
rm -rf /var/lib/apt/lists/*

# Install cloudflared
# dpkg --add-architecture arm: cloudflared do not provide armhf, this is workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
dpkg --add-architecture arm && \
apt update && \
apt --yes --no-install-recommends install ./cloudflared.deb && \
rm -rf /var/lib/apt/lists/* && \
rm -f cloudflared.deb
@@ -5,9 +5,10 @@ version: '3.3'

services:
uptime-kuma:
image: louislam/uptime-kuma
image: louislam/uptime-kuma:1
container_name: uptime-kuma
volumes:
- ./uptime-kuma:/app/data
ports:
- 3001:3001
restart: always
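A brief usage note: assuming this file is saved as docker-compose.yml in the current directory, the stack can be brought up in the background with either form of Compose:

```bash
# Older standalone binary
docker-compose up -d

# Docker Compose v2 plugin
docker compose up -d
```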
@@ -1,6 +1,6 @@
module.exports = {
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
}
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
};
62
extra/beta/update-version.js
Normal file
62
extra/beta/update-version.js
Normal file
|
@ -0,0 +1,62 @@
|
|||
const pkg = require("../../package.json");
|
||||
const fs = require("fs");
|
||||
const childProcess = require("child_process");
|
||||
const util = require("../../src/util");
|
||||
|
||||
util.polyfill();
|
||||
|
||||
const version = process.env.VERSION;
|
||||
|
||||
console.log("Beta Version: " + version);
|
||||
|
||||
if (!version || !version.includes("-beta.")) {
|
||||
console.error("invalid version, beta version only");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const exists = tagExists(version);
|
||||
|
||||
if (! exists) {
|
||||
// Process package.json
|
||||
pkg.version = version;
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
|
||||
commit(version);
|
||||
tag(version);
|
||||
|
||||
} else {
|
||||
console.log("version tag exists, please delete the tag or use another tag");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
function commit(version) {
|
||||
let msg = "Update to " + version;
|
||||
|
||||
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
|
||||
let stdout = res.stdout.toString().trim();
|
||||
console.log(stdout);
|
||||
|
||||
if (stdout.includes("no changes added to commit")) {
|
||||
throw new Error("commit error");
|
||||
}
|
||||
|
||||
res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
|
||||
console.log(res.stdout.toString().trim());
|
||||
}
|
||||
|
||||
function tag(version) {
|
||||
let res = childProcess.spawnSync("git", [ "tag", version ]);
|
||||
console.log(res.stdout.toString().trim());
|
||||
|
||||
res = childProcess.spawnSync("git", [ "push", "origin", version ]);
|
||||
console.log(res.stdout.toString().trim());
|
||||
}
|
||||
|
||||
function tagExists(version) {
|
||||
if (! version) {
|
||||
throw new Error("invalid version");
|
||||
}
|
||||
|
||||
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
|
||||
|
||||
return res.stdout.toString().trim() === version;
|
||||
}
|
|
@ -29,7 +29,7 @@ const github = require("@actions/github");
|
|||
owner: issue.owner,
|
||||
repo: issue.repo,
|
||||
issue_number: issue.number,
|
||||
labels: ["invalid-format"]
|
||||
labels: [ "invalid-format" ]
|
||||
});
|
||||
|
||||
// Add the issue closing comment
|
||||
|
|
44
extra/download-cloudflared.js
Normal file
44
extra/download-cloudflared.js
Normal file
|
@ -0,0 +1,44 @@
|
|||
//
|
||||
|
||||
const http = require("https"); // or 'https' for https:// URLs
|
||||
const fs = require("fs");
|
||||
|
||||
const platform = process.argv[2];
|
||||
|
||||
if (!platform) {
|
||||
console.error("No platform??");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let arch = null;
|
||||
|
||||
if (platform === "linux/amd64") {
|
||||
arch = "amd64";
|
||||
} else if (platform === "linux/arm64") {
|
||||
arch = "arm64";
|
||||
} else if (platform === "linux/arm/v7") {
|
||||
arch = "arm";
|
||||
} else {
|
||||
console.error("Invalid platform?? " + platform);
|
||||
}
|
||||
|
||||
const file = fs.createWriteStream("cloudflared.deb");
|
||||
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
|
||||
|
||||
function get(url) {
|
||||
http.get(url, function (res) {
|
||||
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
|
||||
console.log("Redirect to " + res.headers.location);
|
||||
get(res.headers.location);
|
||||
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
res.pipe(file);
|
||||
|
||||
res.on("end", function () {
|
||||
console.log("Downloaded");
|
||||
});
|
||||
} else {
|
||||
console.error(res.statusCode);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
|
@ -4,6 +4,7 @@ const tar = require("tar");
|
|||
|
||||
const packageJSON = require("../package.json");
|
||||
const fs = require("fs");
|
||||
const rmSync = require("./fs-rmSync.js");
|
||||
const version = packageJSON.version;
|
||||
|
||||
const filename = "dist.tar.gz";
|
||||
|
@ -11,6 +12,12 @@ const filename = "dist.tar.gz";
|
|||
const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
|
||||
download(url);
|
||||
|
||||
/**
|
||||
* Downloads the latest version of the dist from a GitHub release.
|
||||
* @param {string} url The URL to download from.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function download(url) {
|
||||
console.log(url);
|
||||
|
||||
|
@ -21,7 +28,7 @@ function download(url) {
|
|||
if (fs.existsSync("./dist")) {
|
||||
|
||||
if (fs.existsSync("./dist-backup")) {
|
||||
fs.rmdirSync("./dist-backup", {
|
||||
rmSync("./dist-backup", {
|
||||
recursive: true
|
||||
});
|
||||
}
|
||||
|
@ -35,7 +42,7 @@ function download(url) {
|
|||
|
||||
tarStream.on("close", () => {
|
||||
if (fs.existsSync("./dist-backup")) {
|
||||
fs.rmdirSync("./dist-backup", {
|
||||
rmSync("./dist-backup", {
|
||||
recursive: true
|
||||
});
|
||||
}
|
||||
|
|
19 extra/env2arg.js (Normal file)

@@ -0,0 +1,19 @@
#!/usr/bin/env node

const childProcess = require("child_process");
let env = process.env;

let cmd = process.argv[2];
let args = process.argv.slice(3);
let replacedArgs = [];

for (let arg of args) {
for (let key in env) {
arg = arg.replaceAll(`$${key}`, env[key]);
}
replacedArgs.push(arg);
}

let child = childProcess.spawn(cmd, replacedArgs);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);
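This helper expands `$VAR` placeholders from the environment into the arguments before spawning the command, which is how the `build-docker-*` and `release-beta` scripts in package.json (later in this diff) inject `VERSION`. A minimal sketch of a direct invocation, with an example version string and quoting so the substitution is done by the script rather than the shell:

```bash
# Example values only; the quoted $VERSION is substituted by env2arg.js, not by the shell.
VERSION=1.15.0-beta.1 node ./extra/env2arg.js \
    docker buildx build -f docker/dockerfile -t 'louislam/uptime-kuma:$VERSION' .
```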
23 extra/fs-rmSync.js (Normal file)

@@ -0,0 +1,23 @@
const fs = require("fs");
/**
 * Detect if `fs.rmSync` is available
 * to avoid the runtime deprecation warning triggered for using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
 * or the `recursive` option being removed completely in a future Node.js version.
 * See the links below.
 *
 * @todo Once we drop support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was added in Node.js v14.14.0 and we currently support all Node.js v14 versions, including those before v14.14.0, and this function has almost the same signature as `fs.rmSync`.
 * @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
 * @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the documentation of `fs.rmSync`
 * @param {fs.PathLike} path Valid types for path values in "fs".
 * @param {fs.RmDirOptions} [options] Options for `fs.rmdirSync`; if `fs.rmSync` is available and the `recursive` property is true, the `force` property is automatically set to `true`.
 */
const rmSync = (path, options) => {
    if (typeof fs.rmSync === "function") {
        if (options.recursive) {
            options.force = true;
        }
        return fs.rmSync(path, options);
    }
    return fs.rmdirSync(path, options);
};
module.exports = rmSync;
|
@ -189,7 +189,7 @@ if (type == "local") {
|
|||
bash("check=$(pm2 --version)");
|
||||
if (check == "") {
|
||||
println("Installing PM2");
|
||||
bash("npm install pm2 -g");
|
||||
bash("npm install pm2 -g && pm2 install pm2-logrotate");
|
||||
bash("pm2 startup");
|
||||
}
|
||||
|
||||
|
|
|
@ -4,21 +4,21 @@ const util = require("../src/util");
|
|||
|
||||
util.polyfill();
|
||||
|
||||
const oldVersion = pkg.version
|
||||
const newVersion = oldVersion + "-nightly"
|
||||
const oldVersion = pkg.version;
|
||||
const newVersion = oldVersion + "-nightly";
|
||||
|
||||
console.log("Old Version: " + oldVersion)
|
||||
console.log("New Version: " + newVersion)
|
||||
console.log("Old Version: " + oldVersion);
|
||||
console.log("New Version: " + newVersion);
|
||||
|
||||
if (newVersion) {
|
||||
// Process package.json
|
||||
pkg.version = newVersion
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion)
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion)
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n")
|
||||
pkg.version = newVersion;
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
|
||||
|
||||
// Process README.md
|
||||
if (fs.existsSync("README.md")) {
|
||||
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion))
|
||||
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion));
|
||||
}
|
||||
}
|
||||
|
|
6
extra/press-any-key.js
Normal file
6
extra/press-any-key.js
Normal file
|
@ -0,0 +1,6 @@
|
|||
console.log("Git Push and Publish the release note on github, then press any key to continue");
|
||||
|
||||
process.stdin.setRawMode(true);
|
||||
process.stdin.resume();
|
||||
process.stdin.on("data", process.exit.bind(process, 0));
|
||||
|
|
@ -1,11 +1,10 @@
|
|||
console.log("== Uptime Kuma Reset Password Tool ==");
|
||||
|
||||
console.log("Loading the database");
|
||||
|
||||
const Database = require("../server/database");
|
||||
const { R } = require("redbean-node");
|
||||
const readline = require("readline");
|
||||
const { initJWTSecret } = require("../server/util-server");
|
||||
const User = require("../server/model/user");
|
||||
const args = require("args-parser")(process.argv);
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
|
@ -13,8 +12,9 @@ const rl = readline.createInterface({
|
|||
});
|
||||
|
||||
const main = async () => {
|
||||
console.log("Connecting the database");
|
||||
Database.init(args);
|
||||
await Database.connect();
|
||||
await Database.connect(false, false, true);
|
||||
|
||||
try {
|
||||
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
|
||||
|
@ -31,7 +31,7 @@ const main = async () => {
|
|||
let confirmPassword = await question("Confirm New Password: ");
|
||||
|
||||
if (password === confirmPassword) {
|
||||
await user.resetPassword(password);
|
||||
await User.resetPassword(user.id, password);
|
||||
|
||||
// Reset all sessions by reset jwt secret
|
||||
await initJWTSecret();
|
||||
|
|
|
@ -26,7 +26,7 @@ server.on("request", (request, send, rinfo) => {
|
|||
ttl: 300,
|
||||
address: "1.2.3.4"
|
||||
});
|
||||
} if (question.type === Packet.TYPE.AAAA) {
|
||||
} else if (question.type === Packet.TYPE.AAAA) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
|
|
50
extra/simple-mqtt-server.js
Normal file
50
extra/simple-mqtt-server.js
Normal file
|
@ -0,0 +1,50 @@
|
|||
const { log } = require("../src/util");
|
||||
|
||||
const mqttUsername = "louis1";
|
||||
const mqttPassword = "!@#$LLam";
|
||||
|
||||
class SimpleMqttServer {
|
||||
aedes = require("aedes")();
|
||||
server = require("net").createServer(this.aedes.handle);
|
||||
|
||||
constructor(port) {
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
start() {
|
||||
this.server.listen(this.port, () => {
|
||||
console.log("server started and listening on port ", this.port);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let server1 = new SimpleMqttServer(10000);
|
||||
|
||||
server1.aedes.authenticate = function (client, username, password, callback) {
|
||||
if (username && password) {
|
||||
console.log(password.toString("utf-8"));
|
||||
callback(null, username === mqttUsername && password.toString("utf-8") === mqttPassword);
|
||||
} else {
|
||||
callback(null, false);
|
||||
}
|
||||
};
|
||||
|
||||
server1.aedes.on("subscribe", (subscriptions, client) => {
|
||||
console.log(subscriptions);
|
||||
|
||||
for (let s of subscriptions) {
|
||||
if (s.topic === "test") {
|
||||
server1.aedes.publish({
|
||||
topic: "test",
|
||||
payload: Buffer.from("ok"),
|
||||
}, (error) => {
|
||||
if (error) {
|
||||
log.error("mqtt_server", error);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
server1.start();
|
|
@ -3,6 +3,7 @@
|
|||
import fs from "fs";
|
||||
import path from "path";
|
||||
import util from "util";
|
||||
import rmSync from "../fs-rmSync.js";
|
||||
|
||||
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
|
||||
/**
|
||||
|
@ -30,7 +31,7 @@ console.log("Arguments:", process.argv);
|
|||
const baseLangCode = process.argv[2] || "en";
|
||||
console.log("Base Lang: " + baseLangCode);
|
||||
if (fs.existsSync("./languages")) {
|
||||
fs.rmdirSync("./languages", { recursive: true });
|
||||
rmSync("./languages", { recursive: true });
|
||||
}
|
||||
copyRecursiveSync("../../src/languages", "./languages");
|
||||
|
||||
|
@ -40,7 +41,7 @@ const files = fs.readdirSync("./languages");
|
|||
console.log("Files:", files);
|
||||
|
||||
for (const file of files) {
|
||||
if (!file.endsWith(".js")) {
|
||||
if (! file.endsWith(".js")) {
|
||||
console.log("Skipping " + file);
|
||||
continue;
|
||||
}
|
||||
|
@ -82,5 +83,5 @@ for (const file of files) {
|
|||
fs.writeFileSync(`../../src/languages/${file}`, code);
|
||||
}
|
||||
|
||||
fs.rmdirSync("./languages", { recursive: true });
|
||||
rmSync("./languages", { recursive: true });
|
||||
console.log("Done. Fixing formatting by ESLint...");
|
||||
|
|
|
@ -1,14 +1,12 @@
|
|||
const pkg = require("../package.json");
|
||||
const fs = require("fs");
|
||||
const child_process = require("child_process");
|
||||
const childProcess = require("child_process");
|
||||
const util = require("../src/util");
|
||||
|
||||
util.polyfill();
|
||||
|
||||
const oldVersion = pkg.version;
|
||||
const newVersion = process.argv[2];
|
||||
const newVersion = process.env.VERSION;
|
||||
|
||||
console.log("Old Version: " + oldVersion);
|
||||
console.log("New Version: " + newVersion);
|
||||
|
||||
if (! newVersion) {
|
||||
|
@ -22,25 +20,28 @@ if (! exists) {
|
|||
|
||||
// Process package.json
|
||||
pkg.version = newVersion;
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
|
||||
|
||||
// Replace the version: https://regex101.com/r/hmj2Bc/1
|
||||
pkg.scripts.setup = pkg.scripts.setup.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
|
||||
|
||||
commit(newVersion);
|
||||
tag(newVersion);
|
||||
|
||||
updateWiki(oldVersion, newVersion);
|
||||
|
||||
} else {
|
||||
console.log("version exists");
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the version number in package.json and commits it to git.
|
||||
* @param {string} version - The new version number
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function commit(version) {
|
||||
let msg = "update to " + version;
|
||||
let msg = "Update to " + version;
|
||||
|
||||
let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
|
||||
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
|
||||
let stdout = res.stdout.toString().trim();
|
||||
console.log(stdout);
|
||||
|
||||
|
@ -50,51 +51,22 @@ function commit(version) {
|
|||
}
|
||||
|
||||
function tag(version) {
|
||||
let res = child_process.spawnSync("git", ["tag", version]);
|
||||
let res = childProcess.spawnSync("git", [ "tag", version ]);
|
||||
console.log(res.stdout.toString().trim());
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a given version is already tagged in the git repository.
|
||||
* @param {string} version - The version to check for.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function tagExists(version) {
|
||||
if (! version) {
|
||||
throw new Error("invalid version");
|
||||
}
|
||||
|
||||
let res = child_process.spawnSync("git", ["tag", "-l", version]);
|
||||
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
|
||||
|
||||
return res.stdout.toString().trim() === version;
|
||||
}
|
||||
|
||||
function updateWiki(oldVersion, newVersion) {
|
||||
const wikiDir = "./tmp/wiki";
|
||||
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
|
||||
|
||||
safeDelete(wikiDir);
|
||||
|
||||
child_process.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
|
||||
let content = fs.readFileSync(howToUpdateFilename).toString();
|
||||
content = content.replaceAll(`git checkout ${oldVersion}`, `git checkout ${newVersion}`);
|
||||
fs.writeFileSync(howToUpdateFilename, content);
|
||||
|
||||
child_process.spawnSync("git", ["add", "-A"], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
child_process.spawnSync("git", ["commit", "-m", `Update to ${newVersion} from ${oldVersion}`], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
console.log("Pushing to Github");
|
||||
child_process.spawnSync("git", ["push"], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
safeDelete(wikiDir);
|
||||
}
|
||||
|
||||
function safeDelete(dir) {
|
||||
if (fs.existsSync(dir)) {
|
||||
fs.rmdirSync(dir, {
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
48
extra/update-wiki-version.js
Normal file
48
extra/update-wiki-version.js
Normal file
|
@ -0,0 +1,48 @@
|
|||
const childProcess = require("child_process");
|
||||
const fs = require("fs");
|
||||
|
||||
const newVersion = process.env.VERSION;
|
||||
|
||||
if (!newVersion) {
|
||||
console.log("Missing version");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
updateWiki(newVersion);
|
||||
|
||||
function updateWiki(newVersion) {
|
||||
const wikiDir = "./tmp/wiki";
|
||||
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
|
||||
|
||||
safeDelete(wikiDir);
|
||||
|
||||
childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
|
||||
let content = fs.readFileSync(howToUpdateFilename).toString();
|
||||
|
||||
// Replace the version: https://regex101.com/r/hmj2Bc/1
|
||||
content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
|
||||
fs.writeFileSync(howToUpdateFilename, content);
|
||||
|
||||
childProcess.spawnSync("git", [ "add", "-A" ], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
console.log("Pushing to Github");
|
||||
childProcess.spawnSync("git", [ "push" ], {
|
||||
cwd: wikiDir,
|
||||
});
|
||||
|
||||
safeDelete(wikiDir);
|
||||
}
|
||||
|
||||
function safeDelete(dir) {
|
||||
if (fs.existsSync(dir)) {
|
||||
fs.rmdirSync(dir, {
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
}
|
|
@ -159,7 +159,7 @@ fi
|
|||
check=$(pm2 --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing PM2"
|
||||
npm install pm2 -g
|
||||
npm install pm2 -g && pm2 install pm2-logrotate
|
||||
pm2 startup
|
||||
fi
|
||||
mkdir -p $installPath
|
||||
|
|
12062 package-lock.json (generated): file diff suppressed because it is too large
67
package.json
67
package.json
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "uptime-kuma",
|
||||
"version": "1.11.3",
|
||||
"version": "1.15.0-beta.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
@ -13,6 +13,7 @@
|
|||
"install-legacy": "npm install --legacy-peer-deps",
|
||||
"update-legacy": "npm update --legacy-peer-deps",
|
||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
|
||||
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||
"lint": "npm run lint:js && npm run lint:style",
|
||||
"dev": "vite --host --config ./config/vite.config.js",
|
||||
|
@ -20,7 +21,7 @@
|
|||
"start-server": "node server/server.js",
|
||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
||||
"build": "vite build --config ./config/vite.config.js",
|
||||
"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
|
||||
"test": "npm run lint && node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
|
||||
"test-with-build": "npm run build && npm test",
|
||||
"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend",
|
||||
"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
|
||||
|
@ -30,15 +31,14 @@
|
|||
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
|
||||
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
|
||||
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
|
||||
"build-docker-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.11.3-alpine --target release . --push",
|
||||
"build-docker-debian": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.11.3 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.11.3-debian --target release . --push",
|
||||
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
|
||||
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
|
||||
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
|
||||
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
|
||||
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||
"setup": "git checkout 1.11.3 && npm ci --production && npm run download-dist",
|
||||
"setup": "git checkout 1.14.1 && npm ci --production && npm run download-dist",
|
||||
"download-dist": "node extra/download-dist.js",
|
||||
"update-version": "node extra/update-version.js",
|
||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||
"reset-password": "node extra/reset-password.js",
|
||||
"remove-2fa": "node extra/remove-2fa.js",
|
||||
|
@ -49,73 +49,86 @@
|
|||
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
|
||||
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
|
||||
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
|
||||
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix",
|
||||
"ncu-patch": "ncu -u -t patch"
|
||||
"ncu-patch": "npm-check-updates -u -t patch",
|
||||
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||
"git-remove-tag": "git tag -d"
|
||||
},
|
||||
"dependencies": {
|
||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||
"@fortawesome/free-solid-svg-icons": "~5.15.4",
|
||||
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
||||
"@louislam/sqlite3": "~6.0.1",
|
||||
"@louislam/sqlite3": "~15.0.3",
|
||||
"@popperjs/core": "~2.10.2",
|
||||
"args-parser": "~1.3.0",
|
||||
"axios": "~0.21.4",
|
||||
"axios": "~0.26.1",
|
||||
"bcryptjs": "~2.4.3",
|
||||
"bootstrap": "5.1.3",
|
||||
"bree": "~7.1.0",
|
||||
"bree": "~7.1.5",
|
||||
"chardet": "^1.3.0",
|
||||
"chart.js": "~3.6.0",
|
||||
"chart.js": "~3.6.2",
|
||||
"chartjs-adapter-dayjs": "~1.0.0",
|
||||
"check-password-strength": "^2.0.3",
|
||||
"check-password-strength": "^2.0.5",
|
||||
"command-exists": "~1.2.9",
|
||||
"compare-versions": "~3.6.0",
|
||||
"dayjs": "~1.10.7",
|
||||
"express": "~4.17.1",
|
||||
"express-basic-auth": "~1.2.0",
|
||||
"dayjs": "~1.10.8",
|
||||
"express": "~4.17.3",
|
||||
"express-basic-auth": "~1.2.1",
|
||||
"favico.js": "^0.3.10",
|
||||
"form-data": "~4.0.0",
|
||||
"http-graceful-shutdown": "~3.1.5",
|
||||
"http-graceful-shutdown": "~3.1.7",
|
||||
"http-proxy-agent": "^5.0.0",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
"iconv-lite": "^0.6.3",
|
||||
"jsonwebtoken": "~8.5.1",
|
||||
"jwt-decode": "^3.1.2",
|
||||
"limiter": "^2.1.0",
|
||||
"mqtt": "^4.2.8",
|
||||
"node-cloudflared-tunnel": "~1.0.9",
|
||||
"nodemailer": "~6.6.5",
|
||||
"notp": "~2.0.3",
|
||||
"password-hash": "~1.2.2",
|
||||
"postcss-rtlcss": "~3.4.1",
|
||||
"postcss-scss": "~4.0.2",
|
||||
"postcss-scss": "~4.0.3",
|
||||
"prismjs": "^1.27.0",
|
||||
"prom-client": "~13.2.0",
|
||||
"prometheus-api-metrics": "~3.2.0",
|
||||
"prometheus-api-metrics": "~3.2.1",
|
||||
"qrcode": "~1.5.0",
|
||||
"redbean-node": "0.1.3",
|
||||
"socket.io": "~4.4.1",
|
||||
"socket.io-client": "~4.4.1",
|
||||
"socks-proxy-agent": "^6.1.1",
|
||||
"tar": "^6.1.11",
|
||||
"tcp-ping": "~0.1.1",
|
||||
"thirty-two": "~1.0.2",
|
||||
"timezones-list": "~3.0.1",
|
||||
"v-pagination-3": "~0.1.7",
|
||||
"vue": "next",
|
||||
"vue-chart-3": "~0.5.11",
|
||||
"vue-chart-3": "3.0.9",
|
||||
"vue-confirm-dialog": "~1.0.2",
|
||||
"vue-contenteditable": "~3.0.4",
|
||||
"vue-i18n": "~9.1.9",
|
||||
"vue-image-crop-upload": "~3.0.3",
|
||||
"vue-multiselect": "~3.0.0-alpha.2",
|
||||
"vue-prism-editor": "^2.0.0-alpha.2",
|
||||
"vue-qrcode": "~1.0.0",
|
||||
"vue-router": "~4.0.12",
|
||||
"vue-router": "~4.0.14",
|
||||
"vue-toastification": "~2.0.0-rc.5",
|
||||
"vuedraggable": "~4.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/github": "~5.0.0",
|
||||
"@actions/github": "~5.0.1",
|
||||
"@babel/eslint-parser": "~7.15.8",
|
||||
"@babel/preset-env": "^7.15.8",
|
||||
"@types/bootstrap": "~5.1.6",
|
||||
"@vitejs/plugin-legacy": "~1.6.3",
|
||||
"@types/bootstrap": "~5.1.9",
|
||||
"@vitejs/plugin-legacy": "~1.6.4",
|
||||
"@vitejs/plugin-vue": "~1.9.4",
|
||||
"@vue/compiler-sfc": "~3.2.22",
|
||||
"@vue/compiler-sfc": "~3.2.31",
|
||||
"aedes": "^0.46.3",
|
||||
"babel-plugin-rewire": "~1.2.0",
|
||||
"core-js": "~3.18.3",
|
||||
"cross-env": "~7.0.3",
|
||||
|
@ -123,8 +136,10 @@
|
|||
"eslint": "~7.32.0",
|
||||
"eslint-plugin-vue": "~7.18.0",
|
||||
"jest": "~27.2.5",
|
||||
"jest-puppeteer": "~6.0.0",
|
||||
"puppeteer": "~10.4.0",
|
||||
"jest-puppeteer": "~6.0.3",
|
||||
"npm-check-updates": "^12.5.5",
|
||||
"postcss-html": "^1.3.1",
|
||||
"puppeteer": "~13.1.3",
|
||||
"sass": "~1.42.1",
|
||||
"stylelint": "~14.2.0",
|
||||
"stylelint-config-standard": "~24.0.0",
|
||||
|
|
|
@ -2,7 +2,6 @@ const basicAuth = require("express-basic-auth");
|
|||
const passwordHash = require("./password-hash");
|
||||
const { R } = require("redbean-node");
|
||||
const { setting } = require("./util-server");
|
||||
const { debug } = require("../src/util");
|
||||
const { loginRateLimiter } = require("./rate-limiter");
|
||||
|
||||
/**
|
||||
|
@ -12,6 +11,10 @@ const { loginRateLimiter } = require("./rate-limiter");
|
|||
* @returns {Promise<(Bean|null)>}
|
||||
*/
|
||||
exports.login = async function (username, password) {
|
||||
if (typeof username !== "string" || typeof password !== "string") {
|
||||
return null;
|
||||
}
|
||||
|
||||
let user = await R.findOne("user", " username = ? AND active = 1 ", [
|
||||
username,
|
||||
]);
|
||||
|
@ -44,31 +47,34 @@ exports.login = async function (username, password) {
|
|||
* @param {myAuthorizerCB} callback
|
||||
*/
|
||||
function myAuthorizer(username, password, callback) {
|
||||
setting("disableAuth").then((result) => {
|
||||
if (result) {
|
||||
callback(null, true);
|
||||
} else {
|
||||
// Login Rate Limit
|
||||
loginRateLimiter.pass(null, 0).then((pass) => {
|
||||
if (pass) {
|
||||
exports.login(username, password).then((user) => {
|
||||
callback(null, user != null);
|
||||
// Login Rate Limit
|
||||
loginRateLimiter.pass(null, 0).then((pass) => {
|
||||
if (pass) {
|
||||
exports.login(username, password).then((user) => {
|
||||
callback(null, user != null);
|
||||
|
||||
if (user == null) {
|
||||
loginRateLimiter.removeTokens(1);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
callback(null, false);
|
||||
if (user == null) {
|
||||
loginRateLimiter.removeTokens(1);
|
||||
}
|
||||
});
|
||||
|
||||
} else {
|
||||
callback(null, false);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
exports.basicAuth = basicAuth({
|
||||
authorizer: myAuthorizer,
|
||||
authorizeAsync: true,
|
||||
challenge: true,
|
||||
});
|
||||
exports.basicAuth = async function (req, res, next) {
|
||||
const middleware = basicAuth({
|
||||
authorizer: myAuthorizer,
|
||||
authorizeAsync: true,
|
||||
challenge: true,
|
||||
});
|
||||
|
||||
const disabledAuth = await setting("disableAuth");
|
||||
|
||||
if (!disabledAuth) {
|
||||
middleware(req, res, next);
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
const { setSetting } = require("./util-server");
|
||||
const { setSetting, setting } = require("./util-server");
|
||||
const axios = require("axios");
|
||||
const compareVersions = require("compare-versions");
|
||||
|
||||
exports.version = require("../package.json").version;
|
||||
exports.latestVersion = null;
|
||||
|
@ -17,6 +18,19 @@ exports.startInterval = () => {
|
|||
res.data.slow = "1000.0.0";
|
||||
}
|
||||
|
||||
if (await setting("checkUpdate") === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
let checkBeta = await setting("checkBeta");
|
||||
|
||||
if (checkBeta && res.data.beta) {
|
||||
if (compareVersions.compare(res.data.beta, res.data.beta, ">")) {
|
||||
exports.latestVersion = res.data.beta;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (res.data.slow) {
|
||||
exports.latestVersion = res.data.slow;
|
||||
}
|
||||
|
|
|
@ -3,7 +3,8 @@
|
|||
*/
|
||||
const { TimeLogger } = require("../src/util");
|
||||
const { R } = require("redbean-node");
|
||||
const { io } = require("./server");
|
||||
const { UptimeKumaServer } = require("./uptime-kuma-server");
|
||||
const io = UptimeKumaServer.getInstance().io;
|
||||
const { setting } = require("./util-server");
|
||||
const checkVersion = require("./check-version");
|
||||
|
||||
|
@ -63,7 +64,7 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
|
|||
}
|
||||
|
||||
/**
|
||||
* Important Heart beat list (aka event list)
|
||||
* Important Heart beat list (aka event list)
|
||||
* @param {Socket} socket Socket.io instance
|
||||
* @param {number} monitorID ID of monitor to send heartbeat history
|
||||
* @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
|
||||
|
@ -93,7 +94,23 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
|
|||
}
|
||||
|
||||
/**
|
||||
* Send application info
|
||||
* Emit proxy list to client
|
||||
* @param {Socket} socket Socket.io socket instance
|
||||
* @return {Promise<Bean[]>}
|
||||
*/
|
||||
async function sendProxyList(socket) {
|
||||
const timeLogger = new TimeLogger();
|
||||
|
||||
const list = await R.find("proxy", " user_id = ? ", [ socket.userID ]);
|
||||
io.to(socket.userID).emit("proxyList", list.map(bean => bean.export()));
|
||||
|
||||
timeLogger.print("Send Proxy List");
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
/**
|
||||
* Emits the version information to the client.
|
||||
* @param {Socket} socket Socket.io socket instance
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
|
@ -109,6 +126,6 @@ module.exports = {
|
|||
sendNotificationList,
|
||||
sendImportantHeartbeatList,
|
||||
sendHeartbeatList,
|
||||
sendInfo
|
||||
sendProxyList,
|
||||
sendInfo,
|
||||
};
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
const fs = require("fs");
|
||||
const { R } = require("redbean-node");
|
||||
const { setSetting, setting } = require("./util-server");
|
||||
const { debug, sleep } = require("../src/util");
|
||||
const { log, sleep } = require("../src/util");
|
||||
const dayjs = require("dayjs");
|
||||
const knex = require("knex");
|
||||
|
||||
|
@ -53,6 +53,11 @@ class Database {
|
|||
"patch-2fa-invalidate-used-token.sql": true,
|
||||
"patch-notification_sent_history.sql": true,
|
||||
"patch-monitor-basic-auth.sql": true,
|
||||
"patch-status-page.sql": true,
|
||||
"patch-proxy.sql": true,
|
||||
"patch-monitor-expiry-notification.sql": true,
|
||||
"patch-status-page-footer-css.sql": true,
|
||||
"patch-added-mqtt-monitor.sql": true,
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -81,16 +86,19 @@ class Database {
|
|||
fs.mkdirSync(Database.uploadDir, { recursive: true });
|
||||
}
|
||||
|
||||
console.log(`Data Dir: ${Database.dataDir}`);
|
||||
log.info("db", `Data Dir: ${Database.dataDir}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to the database
|
||||
* @param {boolean} [testMode=false] Should the connection be
|
||||
* started in test mode?
|
||||
* @param {boolean} [autoloadModels=true] Should models be
|
||||
* automatically loaded?
|
||||
* @param {boolean} [noLog=false] Should logs not be output?
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async connect(testMode = false) {
|
||||
static async connect(testMode = false, autoloadModels = true, noLog = false) {
|
||||
const acquireConnectionTimeout = 120 * 1000;
|
||||
|
||||
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
|
||||
|
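The widened connect() signature documented above is aimed at test setups. A hedged sketch of how the new flags might be combined, assuming the data directory has already been initialised (the require path and calling context are illustrative, not taken from the repository's test code):

const Database = require("./database");

async function connectForTests() {
    // testMode = true, autoloadModels = false, noLog = true:
    // open the SQLite connection without registering models and without printing the SQLite config.
    await Database.connect(true, false, true);
}

connectForTests().catch(console.error);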
@ -120,7 +128,10 @@ class Database {
|
|||
|
||||
// Auto map the model to a bean object
|
||||
R.freeze(true);
|
||||
await R.autoloadModels("./server/model");
|
||||
|
||||
if (autoloadModels) {
|
||||
await R.autoloadModels("./server/model");
|
||||
}
|
||||
|
||||
await R.exec("PRAGMA foreign_keys = ON");
|
||||
if (testMode) {
|
||||
|
@ -133,10 +144,17 @@ class Database {
|
|||
await R.exec("PRAGMA cache_size = -12000");
|
||||
await R.exec("PRAGMA auto_vacuum = FULL");
|
||||
|
||||
console.log("SQLite config:");
|
||||
console.log(await R.getAll("PRAGMA journal_mode"));
|
||||
console.log(await R.getAll("PRAGMA cache_size"));
|
||||
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||
// This ensures that an operating system crash or power failure will not corrupt the database.
|
||||
// FULL synchronous is very safe, but it is also slower.
|
||||
// Read more: https://sqlite.org/pragma.html#pragma_synchronous
|
||||
await R.exec("PRAGMA synchronous = FULL");
|
||||
|
||||
if (!noLog) {
|
||||
log.info("db", "SQLite config:");
|
||||
log.info("db", await R.getAll("PRAGMA journal_mode"));
|
||||
log.info("db", await R.getAll("PRAGMA cache_size"));
|
||||
log.info("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||
}
|
||||
}
|
||||
|
||||
/** Patch the database */
|
||||
|
@ -147,15 +165,15 @@ class Database {
|
|||
version = 0;
|
||||
}
|
||||
|
||||
console.info("Your database version: " + version);
|
||||
console.info("Latest database version: " + this.latestVersion);
|
||||
log.info("db", "Your database version: " + version);
|
||||
log.info("db", "Latest database version: " + this.latestVersion);
|
||||
|
||||
if (version === this.latestVersion) {
|
||||
console.info("Database patch not needed");
|
||||
log.info("db", "Database patch not needed");
|
||||
} else if (version > this.latestVersion) {
|
||||
console.info("Warning: Database version is newer than expected");
|
||||
log.info("db", "Warning: Database version is newer than expected");
|
||||
} else {
|
||||
console.info("Database patch is needed");
|
||||
log.info("db", "Database patch is needed");
|
||||
|
||||
this.backup(version);
|
||||
|
||||
|
@ -163,17 +181,17 @@ class Database {
|
|||
try {
|
||||
for (let i = version + 1; i <= this.latestVersion; i++) {
|
||||
const sqlFile = `./db/patch${i}.sql`;
|
||||
console.info(`Patching ${sqlFile}`);
|
||||
log.info("db", `Patching ${sqlFile}`);
|
||||
await Database.importSQLFile(sqlFile);
|
||||
console.info(`Patched ${sqlFile}`);
|
||||
log.info("db", `Patched ${sqlFile}`);
|
||||
await setSetting("database_version", i);
|
||||
}
|
||||
} catch (ex) {
|
||||
await Database.close();
|
||||
|
||||
console.error(ex);
|
||||
console.error("Start Uptime-Kuma failed due to issue patching the database");
|
||||
console.error("Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
log.error("db", ex);
|
||||
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
|
||||
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
|
||||
this.restore();
|
||||
process.exit(1);
|
||||
|
@ -181,6 +199,7 @@ class Database {
|
|||
}
|
||||
|
||||
await this.patch2();
|
||||
await this.migrateNewStatusPage();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -190,15 +209,15 @@ class Database {
|
|||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async patch2() {
|
||||
console.log("Database Patch 2.0 Process");
|
||||
log.info("db", "Database Patch 2.0 Process");
|
||||
let databasePatchedFiles = await setting("databasePatchedFiles");
|
||||
|
||||
if (! databasePatchedFiles) {
|
||||
databasePatchedFiles = {};
|
||||
}
|
||||
|
||||
debug("Patched files:");
|
||||
debug(databasePatchedFiles);
|
||||
log.debug("db", "Patched files:");
|
||||
log.debug("db", databasePatchedFiles);
|
||||
|
||||
try {
|
||||
for (let sqlFilename in this.patchList) {
|
||||
|
@ -206,15 +225,15 @@ class Database {
|
|||
}
|
||||
|
||||
if (this.patched) {
|
||||
console.log("Database Patched Successfully");
|
||||
log.info("db", "Database Patched Successfully");
|
||||
}
|
||||
|
||||
} catch (ex) {
|
||||
await Database.close();
|
||||
|
||||
console.error(ex);
|
||||
console.error("Start Uptime-Kuma failed due to issue patching the database");
|
||||
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
log.error("db", ex);
|
||||
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
|
||||
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
|
||||
this.restore();
|
||||
|
||||
|
@ -224,6 +243,74 @@ class Database {
|
|||
await setSetting("databasePatchedFiles", databasePatchedFiles);
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate status page value in setting to "status_page" table
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async migrateNewStatusPage() {
|
||||
|
||||
// Fix 1.13.0 empty slug bug
|
||||
await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");
|
||||
|
||||
let title = await setting("title");
|
||||
|
||||
if (title) {
|
||||
console.log("Migrating Status Page");
|
||||
|
||||
let statusPageCheck = await R.findOne("status_page", " slug = 'default' ");
|
||||
|
||||
if (statusPageCheck !== null) {
|
||||
console.log("Migrating Status Page - Skip, default slug record is already existing");
|
||||
return;
|
||||
}
|
||||
|
||||
let statusPage = R.dispense("status_page");
|
||||
statusPage.slug = "default";
|
||||
statusPage.title = title;
|
||||
statusPage.description = await setting("description");
|
||||
statusPage.icon = await setting("icon");
|
||||
statusPage.theme = await setting("statusPageTheme");
|
||||
statusPage.published = !!await setting("statusPagePublished");
|
||||
statusPage.search_engine_index = !!await setting("searchEngineIndex");
|
||||
statusPage.show_tags = !!await setting("statusPageTags");
|
||||
statusPage.password = null;
|
||||
|
||||
if (!statusPage.title) {
|
||||
statusPage.title = "My Status Page";
|
||||
}
|
||||
|
||||
if (!statusPage.icon) {
|
||||
statusPage.icon = "";
|
||||
}
|
||||
|
||||
if (!statusPage.theme) {
|
||||
statusPage.theme = "light";
|
||||
}
|
||||
|
||||
let id = await R.store(statusPage);
|
||||
|
||||
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
|
||||
id
|
||||
]);
|
||||
|
||||
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
|
||||
id
|
||||
]);
|
||||
|
||||
await R.exec("DELETE FROM setting WHERE type = 'statusPage'");
|
||||
|
||||
// Migrate Entry Page if it is status page
|
||||
let entryPage = await setting("entryPage");
|
||||
|
||||
if (entryPage === "statusPage") {
|
||||
await setSetting("entryPage", "statusPage-default", "general");
|
||||
}
|
||||
|
||||
console.log("Migrating Status Page - Done");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Patch database using new patching process
|
||||
* Used in patch2() only
|
||||
|
@ -236,16 +323,16 @@ class Database {
|
|||
let value = this.patchList[sqlFilename];
|
||||
|
||||
if (! value) {
|
||||
console.log(sqlFilename + " skip");
|
||||
log.info("db", sqlFilename + " skip");
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if patched
|
||||
if (! databasePatchedFiles[sqlFilename]) {
|
||||
console.log(sqlFilename + " is not patched");
|
||||
log.info("db", sqlFilename + " is not patched");
|
||||
|
||||
if (value.parents) {
|
||||
console.log(sqlFilename + " need parents");
|
||||
log.info("db", sqlFilename + " need parents");
|
||||
for (let parentSQLFilename of value.parents) {
|
||||
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
|
||||
}
|
||||
|
@ -253,14 +340,14 @@ class Database {
|
|||
|
||||
this.backup(dayjs().format("YYYYMMDDHHmmss"));
|
||||
|
||||
console.log(sqlFilename + " is patching");
|
||||
log.info("db", sqlFilename + " is patching");
|
||||
this.patched = true;
|
||||
await this.importSQLFile("./db/" + sqlFilename);
|
||||
databasePatchedFiles[sqlFilename] = true;
|
||||
console.log(sqlFilename + " was patched successfully");
|
||||
log.info("db", sqlFilename + " was patched successfully");
|
||||
|
||||
} else {
|
||||
debug(sqlFilename + " is already patched, skip");
|
||||
log.debug("db", sqlFilename + " is already patched, skip");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -316,7 +403,7 @@ class Database {
|
|||
};
|
||||
process.addListener("unhandledRejection", listener);
|
||||
|
||||
console.log("Closing the database");
|
||||
log.info("db", "Closing the database");
|
||||
|
||||
while (true) {
|
||||
Database.noReject = true;
|
||||
|
@ -326,10 +413,10 @@ class Database {
|
|||
if (Database.noReject) {
|
||||
break;
|
||||
} else {
|
||||
console.log("Waiting to close the database");
|
||||
log.info("db", "Waiting to close the database");
|
||||
}
|
||||
}
|
||||
console.log("SQLite closed");
|
||||
log.info("db", "SQLite closed");
|
||||
|
||||
process.removeListener("unhandledRejection", listener);
|
||||
}
|
||||
|
@ -341,7 +428,7 @@ class Database {
|
|||
*/
|
||||
static backup(version) {
|
||||
if (! this.backupPath) {
|
||||
console.info("Backing up the database");
|
||||
log.info("db", "Backing up the database");
|
||||
this.backupPath = this.dataDir + "kuma.db.bak" + version;
|
||||
fs.copyFileSync(Database.path, this.backupPath);
|
||||
|
||||
|
@ -362,7 +449,7 @@ class Database {
|
|||
/** Restore from most recent backup */
|
||||
static restore() {
|
||||
if (this.backupPath) {
|
||||
console.error("Patching the database failed!!! Restoring the backup");
|
||||
log.error("db", "Patching the database failed!!! Restoring the backup");
|
||||
|
||||
const shmPath = Database.path + "-shm";
|
||||
const walPath = Database.path + "-wal";
|
||||
|
@ -381,7 +468,7 @@ class Database {
|
|||
fs.unlinkSync(walPath);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("Restore failed; you may need to restore the backup manually");
|
||||
log.error("db", "Restore failed; you may need to restore the backup manually");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
@ -397,15 +484,15 @@ class Database {
|
|||
}
|
||||
|
||||
} else {
|
||||
console.log("Nothing to restore");
|
||||
log.info("db", "Nothing to restore");
|
||||
}
|
||||
}
|
||||
|
||||
/** Get the size of the database */
|
||||
static getSize() {
|
||||
debug("Database.getSize()");
|
||||
log.debug("db", "Database.getSize()");
|
||||
let stats = fs.statSync(Database.path);
|
||||
debug(stats);
|
||||
log.debug("db", stats);
|
||||
return stats.size;
|
||||
}
|
||||
|
||||
|
|
|
@ -3,17 +3,21 @@
|
|||
Modified with 0 dependencies
|
||||
*/
|
||||
let fs = require("fs");
|
||||
const { log } = require("../src/util");
|
||||
|
||||
let ImageDataURI = (() => {
|
||||
|
||||
/**
|
||||
* Decode the data:image/ URI
|
||||
* @param {string} dataURI data:image/ URI to decode
|
||||
* @returns {Object}
|
||||
* @returns {?Object} An object with properties "imageType" and "dataBase64".
|
||||
* The former is the image type, e.g., "png", and the latter is a base64
|
||||
* encoded string of the image's binary data. If it fails to parse, returns
|
||||
* null instead of an object.
|
||||
*/
|
||||
function decode(dataURI) {
|
||||
if (!/data:image\//.test(dataURI)) {
|
||||
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
|
||||
log.error("image-data-uri", "It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -29,11 +33,12 @@ let ImageDataURI = (() => {
|
|||
* Encode an image into data:image/ URI
|
||||
* @param {(Buffer|string)} data Data to encode
|
||||
* @param {string} mediaType Media type of data
|
||||
* @returns {(string|null)}
|
||||
* @returns {(string|null)} A string representing the base64-encoded
|
||||
* version of the given Buffer object or null if an error occurred.
|
||||
*/
|
||||
function encode(data, mediaType) {
|
||||
if (!data || !mediaType) {
|
||||
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
|
||||
log.error("image-data-uri", "Missing some of the required params: data, mediaType");
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -47,7 +52,7 @@ let ImageDataURI = (() => {
|
|||
/**
|
||||
* Write data URI to file
|
||||
* @param {string} dataURI data:image/ URI
|
||||
* @param {string} filePath Path to write file to
|
||||
* @param {string} [filePath] Path to write file to
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
function outputFile(dataURI, filePath) {
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
const path = require("path");
|
||||
const Bree = require("bree");
|
||||
const { SHARE_ENV } = require("worker_threads");
|
||||
|
||||
const { log } = require("../src/util");
|
||||
let bree;
|
||||
const jobs = [
|
||||
{
|
||||
name: "clear-old-data",
|
||||
|
@ -15,7 +16,7 @@ const jobs = [
|
|||
* @returns {Bree}
|
||||
*/
|
||||
const initBackgroundJobs = function (args) {
|
||||
const bree = new Bree({
|
||||
bree = new Bree({
|
||||
root: path.resolve("server", "jobs"),
|
||||
jobs,
|
||||
worker: {
|
||||
|
@ -23,7 +24,7 @@ const initBackgroundJobs = function (args) {
|
|||
workerData: args,
|
||||
},
|
||||
workerMessageHandler: (message) => {
|
||||
console.log("[Background Job]:", message);
|
||||
log.info("jobs", message);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -31,6 +32,13 @@ const initBackgroundJobs = function (args) {
|
|||
return bree;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
initBackgroundJobs
|
||||
const stopBackgroundJobs = function () {
|
||||
if (bree) {
|
||||
bree.stop();
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
initBackgroundJobs,
|
||||
stopBackgroundJobs
|
||||
};
|
||||
|
|
|
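With stopBackgroundJobs now exported alongside initBackgroundJobs, the scheduler can be torn down cleanly. A hedged sketch of the pairing (the require path, argument object and signal hook are assumptions for illustration):

const { initBackgroundJobs, stopBackgroundJobs } = require("./jobs");

// Start the Bree scheduler; the argument object is forwarded to each job worker as workerData.
initBackgroundJobs({ exampleFlag: true });

// Later, e.g. on shutdown, stop the module-level Bree instance if it was created.
process.on("SIGTERM", () => {
    stopBackgroundJobs();
});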
@ -30,7 +30,7 @@ const DEFAULT_KEEP_PERIOD = 180;
|
|||
try {
|
||||
await R.exec(
|
||||
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
|
||||
[parsedPeriod]
|
||||
[ parsedPeriod ]
|
||||
);
|
||||
} catch (e) {
|
||||
log(`Failed to clear old data: ${e.message}`);
|
||||
|
|
|
@ -17,7 +17,7 @@ const log = function (any) {
|
|||
* @param {number} error The status code to exit
|
||||
*/
|
||||
const exit = function (error) {
|
||||
if (error && error != 0) {
|
||||
if (error && error !== 0) {
|
||||
process.exit(error);
|
||||
} else {
|
||||
if (parentPort) {
|
||||
|
|
|
@ -6,14 +6,15 @@ class Group extends BeanModel {
|
|||
/**
|
||||
* Return a object that ready to parse to JSON for public
|
||||
* Only show necessary data to public
|
||||
* @param {boolean} [showTags=false] Should the JSON include monitor tags
|
||||
* @returns {Object}
|
||||
*/
|
||||
async toPublicJSON() {
|
||||
async toPublicJSON(showTags = false) {
|
||||
let monitorBeanList = await this.getMonitorList();
|
||||
let monitorList = [];
|
||||
|
||||
for (let bean of monitorBeanList) {
|
||||
monitorList.push(await bean.toPublicJSON());
|
||||
monitorList.push(await bean.toPublicJSON(showTags));
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
|
@ -6,11 +6,12 @@ dayjs.extend(utc);
|
|||
dayjs.extend(timezone);
|
||||
const axios = require("axios");
|
||||
const { Prometheus } = require("../prometheus");
|
||||
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
|
||||
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, errorLog } = require("../util-server");
|
||||
const { log, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
|
||||
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, errorLog, mqttAsync } = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
const { Notification } = require("../notification");
|
||||
const { Proxy } = require("../proxy");
|
||||
const { demoMode } = require("../config");
|
||||
const version = require("../../package.json").version;
|
||||
const apicache = require("../modules/apicache");
|
||||
|
@ -24,22 +25,26 @@ const apicache = require("../modules/apicache");
|
|||
class Monitor extends BeanModel {
|
||||
|
||||
/**
|
||||
* Return a object that ready to parse to JSON for public
|
||||
* Return an object that ready to parse to JSON for public
|
||||
* Only show necessary data to public
|
||||
* @returns {Object}
|
||||
*/
|
||||
async toPublicJSON() {
|
||||
return {
|
||||
async toPublicJSON(showTags = false) {
|
||||
let obj = {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
};
|
||||
if (showTags) {
|
||||
obj.tags = await this.getTags();
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a object that ready to parse to JSON
|
||||
* @returns {Object}
|
||||
*/
|
||||
async toJSON() {
|
||||
async toJSON(includeSensitiveData = true) {
|
||||
|
||||
let notificationIDList = {};
|
||||
|
||||
|
@ -51,17 +56,13 @@ class Monitor extends BeanModel {
|
|||
notificationIDList[bean.notification_id] = true;
|
||||
}
|
||||
|
||||
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
|
||||
const tags = await this.getTags();
|
||||
|
||||
return {
|
||||
let data = {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
url: this.url,
|
||||
method: this.method,
|
||||
body: this.body,
|
||||
headers: this.headers,
|
||||
basic_auth_user: this.basic_auth_user,
|
||||
basic_auth_pass: this.basic_auth_pass,
|
||||
hostname: this.hostname,
|
||||
port: this.port,
|
||||
maxretries: this.maxretries,
|
||||
|
@ -71,6 +72,7 @@ class Monitor extends BeanModel {
|
|||
interval: this.interval,
|
||||
retryInterval: this.retryInterval,
|
||||
keyword: this.keyword,
|
||||
expiryNotification: this.isEnabledExpiryNotification(),
|
||||
ignoreTls: this.getIgnoreTls(),
|
||||
upsideDown: this.isUpsideDown(),
|
||||
maxredirects: this.maxredirects,
|
||||
|
@ -78,10 +80,31 @@ class Monitor extends BeanModel {
|
|||
dns_resolve_type: this.dns_resolve_type,
|
||||
dns_resolve_server: this.dns_resolve_server,
|
||||
dns_last_result: this.dns_last_result,
|
||||
pushToken: this.pushToken,
|
||||
proxyId: this.proxy_id,
|
||||
notificationIDList,
|
||||
tags: tags,
|
||||
mqttUsername: this.mqttUsername,
|
||||
mqttPassword: this.mqttPassword,
|
||||
mqttTopic: this.mqttTopic,
|
||||
mqttSuccessMessage: this.mqttSuccessMessage
|
||||
};
|
||||
|
||||
if (includeSensitiveData) {
|
||||
data = {
|
||||
...data,
|
||||
headers: this.headers,
|
||||
body: this.body,
|
||||
basic_auth_user: this.basic_auth_user,
|
||||
basic_auth_pass: this.basic_auth_pass,
|
||||
pushToken: this.pushToken,
|
||||
};
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
async getTags() {
|
||||
return await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [ this.id ]);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -93,6 +116,10 @@ class Monitor extends BeanModel {
|
|||
return Buffer.from(user + ":" + pass).toString("base64");
|
||||
}
|
||||
|
||||
isEnabledExpiryNotification() {
|
||||
return Boolean(this.expiryNotification);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean}
|
||||
|
@ -129,11 +156,24 @@ class Monitor extends BeanModel {
|
|||
|
||||
const beat = async () => {
|
||||
|
||||
let beatInterval = this.interval;
|
||||
|
||||
if (! beatInterval) {
|
||||
beatInterval = 1;
|
||||
}
|
||||
|
||||
if (demoMode) {
|
||||
if (beatInterval < 20) {
|
||||
console.log("beat interval too low, reset to 20s");
|
||||
beatInterval = 20;
|
||||
}
|
||||
}
|
||||
|
||||
// Expose here for prometheus update
|
||||
// undefined if not https
|
||||
let tlsInfo = undefined;
|
||||
|
||||
if (! previousBeat) {
|
||||
if (!previousBeat) {
|
||||
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
|
||||
this.id,
|
||||
]);
|
||||
|
@ -151,7 +191,7 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
// Duration
|
||||
if (! isFirstBeat) {
|
||||
if (!isFirstBeat) {
|
||||
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
|
||||
} else {
|
||||
bean.duration = 0;
|
||||
|
@ -170,7 +210,12 @@ class Monitor extends BeanModel {
|
|||
};
|
||||
}
|
||||
|
||||
debug(`[${this.name}] Prepare Options for axios`);
|
||||
const httpsAgentOptions = {
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
};
|
||||
|
||||
log.debug("monitor", `[${this.name}] Prepare Options for axios`);
|
||||
|
||||
const options = {
|
||||
url: this.url,
|
||||
|
@ -183,17 +228,33 @@ class Monitor extends BeanModel {
|
|||
...(this.headers ? JSON.parse(this.headers) : {}),
|
||||
...(basicAuthHeader),
|
||||
},
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: ! this.getIgnoreTls(),
|
||||
}),
|
||||
maxRedirects: this.maxredirects,
|
||||
validateStatus: (status) => {
|
||||
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
||||
},
|
||||
};
|
||||
|
||||
debug(`[${this.name}] Axios Request`);
|
||||
if (this.proxy_id) {
|
||||
const proxy = await R.load("proxy", this.proxy_id);
|
||||
|
||||
if (proxy && proxy.active) {
|
||||
const { httpAgent, httpsAgent } = Proxy.createAgents(proxy, {
|
||||
httpsAgentOptions: httpsAgentOptions,
|
||||
});
|
||||
|
||||
options.proxy = false;
|
||||
options.httpAgent = httpAgent;
|
||||
options.httpsAgent = httpsAgent;
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.httpsAgent) {
|
||||
options.httpsAgent = new https.Agent(httpsAgentOptions);
|
||||
}
|
||||
|
||||
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
|
||||
log.debug("monitor", `[${this.name}] Axios Request`);
|
||||
|
||||
let res = await axios.request(options);
|
||||
bean.msg = `${res.status} - ${res.statusText}`;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
@ -201,29 +262,30 @@ class Monitor extends BeanModel {
|
|||
// Check certificate if https is used
|
||||
let certInfoStartTime = dayjs().valueOf();
|
||||
if (this.getUrl()?.protocol === "https:") {
|
||||
debug(`[${this.name}] Check cert`);
|
||||
log.debug("monitor", `[${this.name}] Check cert`);
|
||||
try {
|
||||
let tlsInfoObject = checkCertificate(res);
|
||||
tlsInfo = await this.updateTlsInfo(tlsInfoObject);
|
||||
|
||||
if (!this.getIgnoreTls()) {
|
||||
debug(`[${this.name}] call sendCertNotification`);
|
||||
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
|
||||
log.debug("monitor", `[${this.name}] call sendCertNotification`);
|
||||
await this.sendCertNotification(tlsInfoObject);
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
if (e.message !== "No TLS certificate in response") {
|
||||
console.error(e.message);
|
||||
log.error("monitor", "Caught error");
|
||||
log.error("monitor", e.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (process.env.TIMELOGGER === "1") {
|
||||
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||
log.debug("monitor", "Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||
}
|
||||
|
||||
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID == this.id) {
|
||||
console.log(res.data);
|
||||
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID === this.id) {
|
||||
log.info("monitor", res.data);
|
||||
}
|
||||
|
||||
if (this.type === "http") {
|
||||
|
@ -262,24 +324,24 @@ class Monitor extends BeanModel {
|
|||
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
|
||||
if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT") {
|
||||
dnsMessage += "Records: ";
|
||||
dnsMessage += dnsRes.join(" | ");
|
||||
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
|
||||
} else if (this.dns_resolve_type === "CNAME" || this.dns_resolve_type === "PTR") {
|
||||
dnsMessage = dnsRes[0];
|
||||
} else if (this.dns_resolve_type == "CAA") {
|
||||
} else if (this.dns_resolve_type === "CAA") {
|
||||
dnsMessage = dnsRes[0].issue;
|
||||
} else if (this.dns_resolve_type == "MX") {
|
||||
} else if (this.dns_resolve_type === "MX") {
|
||||
dnsRes.forEach(record => {
|
||||
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
|
||||
});
|
||||
dnsMessage = dnsMessage.slice(0, -2);
|
||||
} else if (this.dns_resolve_type == "NS") {
|
||||
} else if (this.dns_resolve_type === "NS") {
|
||||
dnsMessage += "Servers: ";
|
||||
dnsMessage += dnsRes.join(" | ");
|
||||
} else if (this.dns_resolve_type == "SOA") {
|
||||
} else if (this.dns_resolve_type === "SOA") {
|
||||
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
|
||||
} else if (this.dns_resolve_type == "SRV") {
|
||||
} else if (this.dns_resolve_type === "SRV") {
|
||||
dnsRes.forEach(record => {
|
||||
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
|
||||
});
|
||||
|
@ -303,7 +365,7 @@ class Monitor extends BeanModel {
|
|||
time
|
||||
]);
|
||||
|
||||
debug("heartbeatCount" + heartbeatCount + " " + time);
|
||||
log.debug("monitor", "heartbeatCount" + heartbeatCount + " " + time);
|
||||
|
||||
if (heartbeatCount <= 0) {
|
||||
// Fix #922, since previous heartbeat could be inserted by api, it should get from database
|
||||
|
@ -313,7 +375,7 @@ class Monitor extends BeanModel {
|
|||
} else {
|
||||
// No need to insert successful heartbeat for push type, so end here
|
||||
retries = 0;
|
||||
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
|
||||
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -334,7 +396,7 @@ class Monitor extends BeanModel {
|
|||
},
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: ! this.getIgnoreTls(),
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
}),
|
||||
maxRedirects: this.maxredirects,
|
||||
validateStatus: (status) => {
|
||||
|
@ -356,7 +418,14 @@ class Monitor extends BeanModel {
|
|||
} else {
|
||||
throw new Error("Server not found on Steam");
|
||||
}
|
||||
|
||||
} else if (this.type === "mqtt") {
|
||||
bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
|
||||
port: this.port,
|
||||
username: this.mqttUsername,
|
||||
password: this.mqttPassword,
|
||||
interval: this.interval,
|
||||
});
|
||||
bean.status = UP;
|
||||
} else {
|
||||
bean.msg = "Unknown Monitor Type";
|
||||
bean.status = PENDING;
|
||||
|
@ -387,9 +456,7 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
}
|
||||
|
||||
let beatInterval = this.interval;
|
||||
|
||||
debug(`[${this.name}] Check isImportant`);
|
||||
log.debug("monitor", `[${this.name}] Check isImportant`);
|
||||
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
|
||||
|
||||
// Mark as important if status changed, ignore pending pings,
|
||||
|
@ -397,11 +464,11 @@ class Monitor extends BeanModel {
|
|||
if (isImportant) {
|
||||
bean.important = true;
|
||||
|
||||
debug(`[${this.name}] sendNotification`);
|
||||
log.debug("monitor", `[${this.name}] sendNotification`);
|
||||
await Monitor.sendNotification(isFirstBeat, this, bean);
|
||||
|
||||
// Clear Status Page Cache
|
||||
debug(`[${this.name}] apicache clear`);
|
||||
log.debug("monitor", `[${this.name}] apicache clear`);
|
||||
apicache.clear();
|
||||
|
||||
} else {
|
||||
|
@ -409,41 +476,33 @@ class Monitor extends BeanModel {
|
|||
}
|
||||
|
||||
if (bean.status === UP) {
|
||||
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
log.info("monitor", `Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
} else if (bean.status === PENDING) {
|
||||
if (this.retryInterval > 0) {
|
||||
beatInterval = this.retryInterval;
|
||||
}
|
||||
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
log.warn("monitor", `Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
} else {
|
||||
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
}
|
||||
|
||||
debug(`[${this.name}] Send to socket`);
|
||||
log.debug("monitor", `[${this.name}] Send to socket`);
|
||||
io.to(this.user_id).emit("heartbeat", bean.toJSON());
|
||||
Monitor.sendStats(io, this.id, this.user_id);
|
||||
|
||||
debug(`[${this.name}] Store`);
|
||||
log.debug("monitor", `[${this.name}] Store`);
|
||||
await R.store(bean);
|
||||
|
||||
debug(`[${this.name}] prometheus.update`);
|
||||
log.debug("monitor", `[${this.name}] prometheus.update`);
|
||||
prometheus.update(bean, tlsInfo);
|
||||
|
||||
previousBeat = bean;
|
||||
|
||||
if (! this.isStop) {
|
||||
|
||||
if (demoMode) {
|
||||
if (beatInterval < 20) {
|
||||
console.log("beat interval too low, reset to 20s");
|
||||
beatInterval = 20;
|
||||
}
|
||||
}
|
||||
|
||||
debug(`[${this.name}] SetTimeout for next check.`);
|
||||
log.debug("monitor", `[${this.name}] SetTimeout for next check.`);
|
||||
this.heartbeatInterval = setTimeout(safeBeat, beatInterval * 1000);
|
||||
} else {
|
||||
console.log(`[${this.name}] isStop = true, no next check.`);
|
||||
log.info("monitor", `[${this.name}] isStop = true, no next check.`);
|
||||
}
|
||||
|
||||
};
|
||||
|
@ -454,10 +513,10 @@ class Monitor extends BeanModel {
|
|||
} catch (e) {
|
||||
console.trace(e);
|
||||
errorLog(e, false);
|
||||
console.error("Please report to https://github.com/louislam/uptime-kuma/issues");
|
||||
log.error("monitor", "Please report to https://github.com/louislam/uptime-kuma/issues");
|
||||
|
||||
if (! this.isStop) {
|
||||
console.log("Try to restart the monitor");
|
||||
log.info("monitor", "Try to restart the monitor");
|
||||
this.heartbeatInterval = setTimeout(safeBeat, this.interval * 1000);
|
||||
}
|
||||
}
|
||||
|
@ -477,6 +536,12 @@ class Monitor extends BeanModel {
|
|||
stop() {
|
||||
clearTimeout(this.heartbeatInterval);
|
||||
this.isStop = true;
|
||||
|
||||
this.prometheus().remove();
|
||||
}
|
||||
|
||||
prometheus() {
|
||||
return new Prometheus(this);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -499,41 +564,41 @@ class Monitor extends BeanModel {
|
|||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async updateTlsInfo(checkCertificateResult) {
|
||||
let tls_info_bean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
let tlsInfoBean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
this.id,
|
||||
]);
|
||||
|
||||
if (tls_info_bean == null) {
|
||||
tls_info_bean = R.dispense("monitor_tls_info");
|
||||
tls_info_bean.monitor_id = this.id;
|
||||
if (tlsInfoBean == null) {
|
||||
tlsInfoBean = R.dispense("monitor_tls_info");
|
||||
tlsInfoBean.monitor_id = this.id;
|
||||
} else {
|
||||
|
||||
// Clear sent history if the cert changed.
|
||||
try {
|
||||
let oldCertInfo = JSON.parse(tls_info_bean.info_json);
|
||||
let oldCertInfo = JSON.parse(tlsInfoBean.info_json);
|
||||
|
||||
let isValidObjects = oldCertInfo && oldCertInfo.certInfo && checkCertificateResult && checkCertificateResult.certInfo;
|
||||
|
||||
if (isValidObjects) {
|
||||
if (oldCertInfo.certInfo.fingerprint256 !== checkCertificateResult.certInfo.fingerprint256) {
|
||||
debug("Resetting sent_history");
|
||||
log.debug("monitor", "Resetting sent_history");
|
||||
await R.exec("DELETE FROM notification_sent_history WHERE type = 'certificate' AND monitor_id = ?", [
|
||||
this.id
|
||||
]);
|
||||
} else {
|
||||
debug("No need to reset sent_history");
|
||||
debug(oldCertInfo.certInfo.fingerprint256);
|
||||
debug(checkCertificateResult.certInfo.fingerprint256);
|
||||
log.debug("monitor", "No need to reset sent_history");
|
||||
log.debug("monitor", oldCertInfo.certInfo.fingerprint256);
|
||||
log.debug("monitor", checkCertificateResult.certInfo.fingerprint256);
|
||||
}
|
||||
} else {
|
||||
debug("Not valid object");
|
||||
log.debug("monitor", "Not valid object");
|
||||
}
|
||||
} catch (e) { }
|
||||
|
||||
}
|
||||
|
||||
tls_info_bean.info_json = JSON.stringify(checkCertificateResult);
|
||||
await R.store(tls_info_bean);
|
||||
tlsInfoBean.info_json = JSON.stringify(checkCertificateResult);
|
||||
await R.store(tlsInfoBean);
|
||||
|
||||
return checkCertificateResult;
|
||||
}
|
||||
|
@ -553,7 +618,7 @@ class Monitor extends BeanModel {
|
|||
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
|
||||
await Monitor.sendCertInfo(io, monitorID, userID);
|
||||
} else {
|
||||
debug("No clients in the room, no need to send stats");
|
||||
log.debug("monitor", "No clients in the room, no need to send stats");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -586,11 +651,11 @@ class Monitor extends BeanModel {
|
|||
* @param {number} userID ID of user to send to
|
||||
*/
|
||||
static async sendCertInfo(io, monitorID, userID) {
|
||||
let tls_info = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
let tlsInfo = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
monitorID,
|
||||
]);
|
||||
if (tls_info != null) {
|
||||
io.to(userID).emit("certInfo", monitorID, tls_info.info_json);
|
||||
if (tlsInfo != null) {
|
||||
io.to(userID).emit("certInfo", monitorID, tlsInfo.info_json);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -653,7 +718,7 @@ class Monitor extends BeanModel {
|
|||
|
||||
} else {
|
||||
// Handle new monitor with only one beat, because the beat's duration = 0
|
||||
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
|
||||
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [monitorID]));
|
||||
|
||||
if (status === UP) {
|
||||
uptime = 1;
|
||||
|
@ -721,10 +786,10 @@ class Monitor extends BeanModel {
|
|||
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(), bean.toJSON());
|
||||
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), bean.toJSON());
|
||||
} catch (e) {
|
||||
console.error("Cannot send notification to " + notification.name);
|
||||
console.log(e);
|
||||
log.error("monitor", "Cannot send notification to " + notification.name);
|
||||
log.error("monitor", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
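The monitor.toJSON(false) call above is what keeps credentials out of notification payloads. A short sketch of the difference between the serializers, assuming a loaded monitor bean (illustrative only):

async function describeMonitor(monitor) {
    const full = await monitor.toJSON();          // includes headers, body, basic_auth_user/pass, pushToken
    const redacted = await monitor.toJSON(false); // same shape minus those sensitive fields
    const publicView = await monitor.toPublicJSON(true); // only id, name and, with the flag, tags
    return { full, redacted, publicView };
}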
@ -750,7 +815,7 @@ class Monitor extends BeanModel {
|
|||
if (tlsInfoObject && tlsInfoObject.certInfo && tlsInfoObject.certInfo.daysRemaining) {
|
||||
const notificationList = await Monitor.getNotificationList(this);
|
||||
|
||||
debug("call sendCertNotificationByTargetDays");
|
||||
log.debug("monitor", "call sendCertNotificationByTargetDays");
|
||||
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 21, notificationList);
|
||||
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 14, notificationList);
|
||||
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 7, notificationList);
|
||||
|
@ -768,7 +833,7 @@ class Monitor extends BeanModel {
|
|||
async sendCertNotificationByTargetDays(daysRemaining, targetDays, notificationList) {
|
||||
|
||||
if (daysRemaining > targetDays) {
|
||||
debug(`No need to send cert notification. ${daysRemaining} > ${targetDays}`);
|
||||
log.debug("monitor", `No need to send cert notification. ${daysRemaining} > ${targetDays}`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -782,21 +847,21 @@ class Monitor extends BeanModel {
|
|||
|
||||
// Sent already, no need to send again
|
||||
if (row) {
|
||||
debug("Sent already, no need to send again");
|
||||
log.debug("monitor", "Sent already, no need to send again");
|
||||
return;
|
||||
}
|
||||
|
||||
let sent = false;
|
||||
debug("Send certificate notification");
|
||||
log.debug("monitor", "Send certificate notification");
|
||||
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
debug("Sending to " + notification.name);
|
||||
log.debug("monitor", "Sending to " + notification.name);
|
||||
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] Certificate will be expired in ${daysRemaining} days`);
|
||||
sent = true;
|
||||
} catch (e) {
|
||||
console.error("Cannot send cert notification to " + notification.name);
|
||||
console.error(e);
|
||||
log.error("monitor", "Cannot send cert notification to " + notification.name);
|
||||
log.error("monitor", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -808,7 +873,7 @@ class Monitor extends BeanModel {
|
|||
]);
|
||||
}
|
||||
} else {
|
||||
debug("No notification, no need to send cert notification");
|
||||
log.debug("monitor", "No notification, no need to send cert notification");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
21 server/model/proxy.js Normal file
|
@ -0,0 +1,21 @@
|
|||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
|
||||
class Proxy extends BeanModel {
|
||||
toJSON() {
|
||||
return {
|
||||
id: this._id,
|
||||
userId: this._user_id,
|
||||
protocol: this._protocol,
|
||||
host: this._host,
|
||||
port: this._port,
|
||||
auth: !!this._auth,
|
||||
username: this._username,
|
||||
password: this._password,
|
||||
active: !!this._active,
|
||||
default: !!this._default,
|
||||
createdDate: this._created_date,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Proxy;
|
132 server/model/status_page.js Normal file
|
@ -0,0 +1,132 @@
|
|||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
const { R } = require("redbean-node");
|
||||
|
||||
class StatusPage extends BeanModel {
|
||||
|
||||
static domainMappingList = { };
|
||||
|
||||
/**
|
||||
* Return object like this: { "test-uptime.kuma.pet": "default" }
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async loadDomainMappingList() {
|
||||
StatusPage.domainMappingList = await R.getAssoc(`
|
||||
SELECT domain, slug
|
||||
FROM status_page, status_page_cname
|
||||
WHERE status_page.id = status_page_cname.status_page_id
|
||||
`);
|
||||
}
|
||||
|
||||
static async sendStatusPageList(io, socket) {
|
||||
let result = {};
|
||||
|
||||
let list = await R.findAll("status_page", " ORDER BY title ");
|
||||
|
||||
for (let item of list) {
|
||||
result[item.id] = await item.toJSON();
|
||||
}
|
||||
|
||||
io.to(socket.userID).emit("statusPageList", result);
|
||||
return list;
|
||||
}
|
||||
|
||||
async updateDomainNameList(domainNameList) {
|
||||
|
||||
if (!Array.isArray(domainNameList)) {
|
||||
throw new Error("Invalid array");
|
||||
}
|
||||
|
||||
let trx = await R.begin();
|
||||
|
||||
await trx.exec("DELETE FROM status_page_cname WHERE status_page_id = ?", [
|
||||
this.id,
|
||||
]);
|
||||
|
||||
try {
|
||||
for (let domain of domainNameList) {
|
||||
if (typeof domain !== "string") {
|
||||
throw new Error("Invalid domain");
|
||||
}
|
||||
|
||||
if (domain.trim() === "") {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the domain name is used in another status page, delete it
|
||||
await trx.exec("DELETE FROM status_page_cname WHERE domain = ?", [
|
||||
domain,
|
||||
]);
|
||||
|
||||
let mapping = trx.dispense("status_page_cname");
|
||||
mapping.status_page_id = this.id;
|
||||
mapping.domain = domain;
|
||||
await trx.store(mapping);
|
||||
}
|
||||
await trx.commit();
|
||||
} catch (error) {
|
||||
await trx.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
getDomainNameList() {
|
||||
let domainList = [];
|
||||
for (let domain in StatusPage.domainMappingList) {
|
||||
let s = StatusPage.domainMappingList[domain];
|
||||
|
||||
if (this.slug === s) {
|
||||
domainList.push(domain);
|
||||
}
|
||||
}
|
||||
return domainList;
|
||||
}
|
||||
|
||||
async toJSON() {
|
||||
return {
|
||||
id: this.id,
|
||||
slug: this.slug,
|
||||
title: this.title,
|
||||
description: this.description,
|
||||
icon: this.getIcon(),
|
||||
theme: this.theme,
|
||||
published: !!this.published,
|
||||
showTags: !!this.show_tags,
|
||||
domainNameList: this.getDomainNameList(),
|
||||
customCSS: this.custom_css,
|
||||
footerText: this.footer_text,
|
||||
showPoweredBy: !!this.show_powered_by,
|
||||
};
|
||||
}
|
||||
|
||||
async toPublicJSON() {
|
||||
return {
|
||||
slug: this.slug,
|
||||
title: this.title,
|
||||
description: this.description,
|
||||
icon: this.getIcon(),
|
||||
theme: this.theme,
|
||||
published: !!this.published,
|
||||
showTags: !!this.show_tags,
|
||||
customCSS: this.custom_css,
|
||||
footerText: this.footer_text,
|
||||
showPoweredBy: !!this.show_powered_by,
|
||||
};
|
||||
}
|
||||
|
||||
static async slugToID(slug) {
|
||||
return await R.getCell("SELECT id FROM status_page WHERE slug = ? ", [
|
||||
slug
|
||||
]);
|
||||
}
|
||||
|
||||
getIcon() {
|
||||
if (!this.icon) {
|
||||
return "/icon.svg";
|
||||
} else {
|
||||
return this.icon;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = StatusPage;
|
|
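A hedged usage sketch for the new model: populate the in-memory domain-to-slug map once, then resolve a slug to its row id (the require path and the slug value are assumptions):

const StatusPage = require("./model/status_page");

async function resolveStatusPage(slug) {
    // Builds StatusPage.domainMappingList, e.g. { "status.example.com": "default" }.
    await StatusPage.loadDomainMappingList();

    // Look up the numeric id backing the given slug.
    return await StatusPage.slugToID(slug);
}

// resolveStatusPage("default").then(id => console.log(id));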
@ -5,17 +5,29 @@ const { R } = require("redbean-node");
|
|||
class User extends BeanModel {
|
||||
|
||||
/**
|
||||
* Direct execute, no need R.store()
|
||||
*
|
||||
* Fix #1510, as in the context reset-password.js, there is no auto model mapping. Call this static function instead.
|
||||
* @param userID
|
||||
* @param newPassword
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async resetPassword(userID, newPassword) {
|
||||
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
|
||||
passwordHash.generate(newPassword),
|
||||
userID
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param newPassword
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async resetPassword(newPassword) {
|
||||
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
|
||||
passwordHash.generate(newPassword),
|
||||
this.id
|
||||
]);
|
||||
await User.resetPassword(this.id, newPassword);
|
||||
this.password = newPassword;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = User;
|
||||
|
|
|
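As the comment on the static method explains, reset-password.js has no automatic model mapping, so the static form takes an explicit user id while the instance method now delegates to it. A sketch of both call styles (the require path, id and password are placeholders):

const User = require("./model/user");

async function resetExamples(userBean) {
    // From a context without model mapping (e.g. the reset-password script):
    await User.resetPassword(1, "new-password");

    // With a loaded user bean, the instance method delegates to the same static helper.
    await userBean.resetPassword("new-password");
}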
@ -68,6 +68,15 @@ function ApiCache() {
|
|||
instances.push(this);
|
||||
this.id = instances.length;
|
||||
|
||||
/**
|
||||
* Logs a message to the console if the `DEBUG` environment variable is set.
|
||||
* @param {string} a - The first argument to log.
|
||||
* @param {string} b - The second argument to log.
|
||||
* @param {string} c - The third argument to log.
|
||||
* @param {string} d - The fourth argument to log, and so on... (optional)
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function debug(a, b, c, d) {
|
||||
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
|
||||
return arg !== undefined;
|
||||
|
@ -77,6 +86,13 @@ function ApiCache() {
|
|||
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the given request and response should be logged.
|
||||
* @param {Object} request The HTTP request object.
|
||||
* @param {Object} response The HTTP response object.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function shouldCacheResponse(request, response, toggle) {
|
||||
let opt = globalOptions;
|
||||
let codes = opt.statusCodes;
|
||||
|
@ -99,6 +115,12 @@ function ApiCache() {
|
|||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a key to the index.
|
||||
* @param {string} key The key to add.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function addIndexEntries(key, req) {
|
||||
let groupName = req.apicacheGroup;
|
||||
|
||||
|
@ -111,6 +133,13 @@ function ApiCache() {
|
|||
index.all.unshift(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new object containing only the whitelisted headers.
|
||||
* @param {Object} headers The original object of header names and values.
|
||||
* @param {Array.<string>} globalOptions.headerWhitelist An array of strings representing the whitelisted header names to keep in the output object.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function filterBlacklistedHeaders(headers) {
|
||||
return Object.keys(headers)
|
||||
.filter(function (key) {
|
||||
|
@ -122,6 +151,12 @@ function ApiCache() {
|
|||
}, {});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Object} headers The response headers to filter.
|
||||
* @returns {Object} A new object containing only the whitelisted response headers.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function createCacheObject(status, headers, data, encoding) {
|
||||
return {
|
||||
status: status,
|
||||
|
@ -132,6 +167,14 @@ function ApiCache() {
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a cache value for the given key.
|
||||
* @param {string} key The cache key to set.
|
||||
* @param {*} value The cache value to set.
|
||||
* @param {number} duration How long in milliseconds the cached response should be valid for (defaults to 1 hour).
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function cacheResponse(key, value, duration) {
|
||||
let redis = globalOptions.redisClient;
|
||||
let expireCallback = globalOptions.events.expire;
|
||||
|
@ -154,6 +197,12 @@ function ApiCache() {
|
|||
}, Math.min(duration, 2147483647));
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends content to the response.
|
||||
* @param {string|Buffer} content The content to append.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function accumulateContent(res, content) {
|
||||
if (content) {
|
||||
if (typeof content == "string") {
|
||||
|
@ -179,6 +228,13 @@ function ApiCache() {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Monkeypatches the response object to add cache control headers and create a cache object.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {Object} res - The response object.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
|
||||
// monkeypatch res.end to create cache object
|
||||
res._apicache = {
|
||||
|
@ -245,6 +301,13 @@ function ApiCache() {
|
|||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Request} request
|
||||
* @param {Response} response
|
||||
* @returns {boolean|undefined} true if the request should be cached, false otherwise. If undefined, defaults to true.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
|
||||
if (toggle && !toggle(request, response)) {
|
||||
return next();
|
||||
|
@ -365,6 +428,13 @@ function ApiCache() {
|
|||
return this.getIndex();
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts a duration string to an integer number of milliseconds.
|
||||
* @param {string} duration - The string to convert.
|
||||
* @returns {number} The converted value in milliseconds, or the defaultDuration if it can't be parsed.
|
||||
*
|
||||
* Generated by Trelent
|
||||
*/
|
||||
function parseDuration(duration, defaultDuration) {
|
||||
if (typeof duration === "number") {
|
||||
return duration;
|
||||
|
|
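parseDuration() above is what lets the cache middleware accept human-readable durations. A sketch of standard apicache-style usage (the express route is an assumption for illustration):

const express = require("express");
const apicache = require("./modules/apicache");

const app = express();
const cache = apicache.middleware;

// "5 minutes" is converted to milliseconds by parseDuration; a plain number is passed through unchanged.
app.get("/api/example", cache("5 minutes"), (req, res) => {
    res.json({ ok: true });
});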
67 server/notification-providers/alerta.js Normal file
|
@ -0,0 +1,67 @@
|
|||
const NotificationProvider = require("./notification-provider");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
const axios = require("axios");
|
||||
|
||||
class Alerta extends NotificationProvider {
|
||||
|
||||
name = "alerta";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
|
||||
try {
|
||||
let alertaUrl = `${notification.alertaApiEndpoint}`;
|
||||
let config = {
|
||||
headers: {
|
||||
"Content-Type": "application/json;charset=UTF-8",
|
||||
"Authorization": "Key " + notification.alertaApiKey,
|
||||
}
|
||||
};
|
||||
let data = {
|
||||
environment: notification.alertaEnvironment,
|
||||
severity: "critical",
|
||||
correlate: [],
|
||||
service: [ "UptimeKuma" ],
|
||||
value: "Timeout",
|
||||
tags: [ "uptimekuma" ],
|
||||
attributes: {},
|
||||
origin: "uptimekuma",
|
||||
type: "exceptionAlert",
|
||||
};
|
||||
|
||||
if (heartbeatJSON == null) {
|
||||
let postData = Object.assign({
|
||||
event: "msg",
|
||||
text: msg,
|
||||
group: "uptimekuma-msg",
|
||||
resource: "Message",
|
||||
}, data);
|
||||
|
||||
await axios.post(alertaUrl, postData, config);
|
||||
} else {
|
||||
let datadup = Object.assign( {
|
||||
correlate: [ "service_up", "service_down" ],
|
||||
event: monitorJSON["type"],
|
||||
group: "uptimekuma-" + monitorJSON["type"],
|
||||
resource: monitorJSON["name"],
|
||||
}, data );
|
||||
|
||||
if (heartbeatJSON["status"] === DOWN) {
|
||||
datadup.severity = notification.alertaAlertState; // critical
|
||||
datadup.text = "Service " + monitorJSON["type"] + " is down.";
|
||||
await axios.post(alertaUrl, datadup, config);
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
datadup.severity = notification.alertaRecoverState; // cleaned
|
||||
datadup.text = "Service " + monitorJSON["type"] + " is up.";
|
||||
await axios.post(alertaUrl, datadup, config);
|
||||
}
|
||||
}
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Alerta;
|
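The new provider reads its endpoint, API key and alert states from the notification object. A hedged example of driving it directly, with placeholder values and an assumed require path:

const Alerta = require("./notification-providers/alerta");

async function sendTestAlert() {
    const notification = {
        alertaApiEndpoint: "https://alerta.example.com/api/alert",
        alertaApiKey: "secret-key",
        alertaEnvironment: "Production",
        alertaAlertState: "critical",
        alertaRecoverState: "cleared",
    };

    // With monitorJSON/heartbeatJSON left out, send() posts a plain "msg" event.
    return await new Alerta().send(notification, "Testing Uptime Kuma alerts");
}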
|
@ -70,7 +70,7 @@ class AliyunSMS extends NotificationProvider {
|
|||
};
|
||||
|
||||
let result = await axios(config);
|
||||
if (result.data.Message == "OK") {
|
||||
if (result.data.Message === "OK") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
const NotificationProvider = require("./notification-provider");
|
||||
const child_process = require("child_process");
|
||||
const childProcess = require("child_process");
|
||||
|
||||
class Apprise extends NotificationProvider {
|
||||
|
||||
name = "apprise";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL])
|
||||
let s = childProcess.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL ]);
|
||||
|
||||
let output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
|
||||
|
||||
|
@ -16,7 +16,7 @@ class Apprise extends NotificationProvider {
|
|||
return "Sent Successfully";
|
||||
}
|
||||
|
||||
throw new Error(output)
|
||||
throw new Error(output);
|
||||
} else {
|
||||
return "No output from apprise";
|
||||
}
|
||||
|
|
|
@ -21,31 +21,26 @@ class Bark extends NotificationProvider {
|
|||
name = "Bark";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
try {
|
||||
var barkEndpoint = notification.barkEndpoint;
|
||||
let barkEndpoint = notification.barkEndpoint;
|
||||
|
||||
// check if the endpoint has a "/" suffix, if so, delete it first
|
||||
if (barkEndpoint.endsWith("/")) {
|
||||
barkEndpoint = barkEndpoint.substring(0, barkEndpoint.length - 1);
|
||||
}
|
||||
// check if the endpoint has a "/" suffix, if so, delete it first
|
||||
if (barkEndpoint.endsWith("/")) {
|
||||
barkEndpoint = barkEndpoint.substring(0, barkEndpoint.length - 1);
|
||||
}
|
||||
|
||||
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] == UP) {
|
||||
let title = "UptimeKuma Monitor Up";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === UP) {
|
||||
let title = "UptimeKuma Monitor Up";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
|
||||
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] == DOWN) {
|
||||
let title = "UptimeKuma Monitor Down";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === DOWN) {
|
||||
let title = "UptimeKuma Monitor Down";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
|
||||
if (msg != null) {
|
||||
let title = "UptimeKuma Message";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
throw error;
|
||||
if (msg != null) {
|
||||
let title = "UptimeKuma Message";
|
||||
return await this.postNotification(title, msg, barkEndpoint);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ class ClickSendSMS extends NotificationProvider {
|
|||
let config = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"Authorization": "Basic " + Buffer.from(notification.clicksendsmsLogin + ":" + notification.clicksendsmsPassword).toString('base64'),
|
||||
"Authorization": "Basic " + Buffer.from(notification.clicksendsmsLogin + ":" + notification.clicksendsmsPassword).toString("base64"),
|
||||
"Accept": "text/json",
|
||||
}
|
||||
};
|
||||
|
|
|
@ -56,7 +56,7 @@ class DingDing extends NotificationProvider {
|
|||
};
|
||||
|
||||
let result = await axios(config);
|
||||
if (result.data.errmsg == "ok") {
|
||||
if (result.data.errmsg === "ok") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
|
|
@ -17,8 +17,8 @@ class Discord extends NotificationProvider {
let discordtestdata = {
username: discordDisplayName,
content: msg,
}
await axios.post(notification.discordWebhookUrl, discordtestdata)
};
await axios.post(notification.discordWebhookUrl, discordtestdata);
return okMsg;
}

@ -35,7 +35,7 @@ class Discord extends NotificationProvider {
}

// If heartbeatJSON is not null, we go into the normal alerting loop.
if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let discorddowndata = {
username: discordDisplayName,
embeds: [{

@ -61,16 +61,16 @@ class Discord extends NotificationProvider {
},
],
}],
}
};

if (notification.discordPrefixMessage) {
discorddowndata.content = notification.discordPrefixMessage;
}

await axios.post(notification.discordWebhookUrl, discorddowndata)
await axios.post(notification.discordWebhookUrl, discorddowndata);
return okMsg;

} else if (heartbeatJSON["status"] == UP) {
} else if (heartbeatJSON["status"] === UP) {
let discordupdata = {
username: discordDisplayName,
embeds: [{

@ -96,17 +96,17 @@ class Discord extends NotificationProvider {
},
],
}],
}
};

if (notification.discordPrefixMessage) {
discordupdata.content = notification.discordPrefixMessage;
}

await axios.post(notification.discordWebhookUrl, discordupdata)
await axios.post(notification.discordWebhookUrl, discordupdata);
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
@ -21,7 +21,7 @@ class Feishu extends NotificationProvider {
return okMsg;
}

if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let downdata = {
msg_type: "post",
content: {

@ -48,7 +48,7 @@ class Feishu extends NotificationProvider {
return okMsg;
}

if (heartbeatJSON["status"] == UP) {
if (heartbeatJSON["status"] === UP) {
let updata = {
msg_type: "post",
content: {
@ -13,11 +13,11 @@ class GoogleChat extends NotificationProvider {
try {
// Google Chat message formatting: https://developers.google.com/chat/api/guides/message-formats/basic

let textMsg = ''
let textMsg = "";
if (heartbeatJSON && heartbeatJSON.status === UP) {
textMsg = `✅ Application is back online\n`;
textMsg = "✅ Application is back online\n";
} else if (heartbeatJSON && heartbeatJSON.status === DOWN) {
textMsg = `🔴 Application went down\n`;
textMsg = "🔴 Application went down\n";
}

if (monitorJSON && monitorJSON.name) {
42
server/notification-providers/gorush.js
Normal file
@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Gorush extends NotificationProvider {

name = "gorush";

async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";

let platformMapping = {
"ios": 1,
"android": 2,
"huawei": 3,
};

try {
let data = {
"notifications": [
{
"tokens": [ notification.gorushDeviceToken ],
"platform": platformMapping[notification.gorushPlatform],
"message": msg,
// Optional
"title": notification.gorushTitle,
"priority": notification.gorushPriority,
"retry": parseInt(notification.gorushRetry) || 0,
"topic": notification.gorushTopic,
}
]
};
let config = {};

await axios.post(`${notification.gorushServerURL}/api/push`, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}

module.exports = Gorush;
@ -15,7 +15,7 @@ class Gotify extends NotificationProvider {
"message": msg,
"priority": notification.gotifyPriority || 8,
"title": "Uptime-Kuma",
})
});

return okMsg;
@ -25,9 +25,9 @@ class Line extends NotificationProvider {
"text": "Test Successful!"
}
]
}
await axios.post(lineAPIUrl, testMessage, config)
} else if (heartbeatJSON["status"] == DOWN) {
};
await axios.post(lineAPIUrl, testMessage, config);
} else if (heartbeatJSON["status"] === DOWN) {
let downMessage = {
"to": notification.lineUserID,
"messages": [

@ -36,9 +36,9 @@ class Line extends NotificationProvider {
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, downMessage, config)
} else if (heartbeatJSON["status"] == UP) {
};
await axios.post(lineAPIUrl, downMessage, config);
} else if (heartbeatJSON["status"] === UP) {
let upMessage = {
"to": notification.lineUserID,
"messages": [

@ -47,12 +47,12 @@ class Line extends NotificationProvider {
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, upMessage, config)
};
await axios.post(lineAPIUrl, upMessage, config);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}
@ -8,38 +8,38 @@ class LunaSea extends NotificationProvider {

async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice;

try {
if (heartbeatJSON == null) {
let testdata = {
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
}
await axios.post(lunaseadevice, testdata)
};
await axios.post(lunaseadevice, testdata);
return okMsg;
}

if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let downdata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, downdata)
};
await axios.post(lunaseadevice, downdata);
return okMsg;
}

if (heartbeatJSON["status"] == UP) {
if (heartbeatJSON["status"] === UP) {
let updata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, updata)
};
await axios.post(lunaseadevice, updata);
return okMsg;
}

} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}

}
@ -1,7 +1,7 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const Crypto = require("crypto");
const { debug } = require("../../src/util");
const { log } = require("../../src/util");

class Matrix extends NotificationProvider {
name = "matrix";

@ -17,11 +17,11 @@ class Matrix extends NotificationProvider {
.slice(0, size)
);

debug("Random String: " + randomString);
log.debug("notification", "Random String: " + randomString);

const roomId = encodeURIComponent(notification.internalRoomId);

debug("Matrix Room ID: " + roomId);
log.debug("notification", "Matrix Room ID: " + roomId);

try {
let config = {
@ -15,16 +15,21 @@ class Mattermost extends NotificationProvider {
let mattermostTestData = {
username: mattermostUserName,
text: msg,
}
await axios.post(notification.mattermostWebhookUrl, mattermostTestData)
};
await axios.post(notification.mattermostWebhookUrl, mattermostTestData);
return okMsg;
}

const mattermostChannel = notification.mattermostchannel;
let mattermostChannel;

if (typeof notification.mattermostchannel === "string") {
mattermostChannel = notification.mattermostchannel.toLowerCase();
}

const mattermostIconEmoji = notification.mattermosticonemo;
const mattermostIconUrl = notification.mattermosticonurl;

if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let mattermostdowndata = {
username: mattermostUserName,
text: "Uptime Kuma Alert",

@ -68,7 +73,7 @@ class Mattermost extends NotificationProvider {
mattermostdowndata
);
return okMsg;
} else if (heartbeatJSON["status"] == UP) {
} else if (heartbeatJSON["status"] === UP) {
let mattermostupdata = {
username: mattermostUserName,
text: "Uptime Kuma Alert",
@ -31,11 +31,11 @@ class NotificationProvider {
if (typeof error.response.data === "string") {
msg += error.response.data;
} else {
msg += JSON.stringify(error.response.data)
msg += JSON.stringify(error.response.data);
}
}

throw new Error(msg)
throw new Error(msg);
}
}
@ -10,7 +10,7 @@ class Octopush extends NotificationProvider {

try {
// Default - V2
if (notification.octopushVersion == 2 || !notification.octopushVersion) {
if (notification.octopushVersion === 2 || !notification.octopushVersion) {
let config = {
headers: {
"api-key": notification.octopushAPIKey,

@ -30,14 +30,14 @@ class Octopush extends NotificationProvider {
"purpose": "alert",
"sender": notification.octopushSenderName
};
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config)
} else if (notification.octopushVersion == 1) {
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config);
} else if (notification.octopushVersion === 1) {
let data = {
"user_login": notification.octopushDMLogin,
"api_key": notification.octopushDMAPIKey,
"sms_recipients": notification.octopushDMPhoneNumber,
"sms_sender": notification.octopushDMSenderName,
"sms_type": (notification.octopushDMSMSType == "sms_premium") ? "FR" : "XXX",
"sms_type": (notification.octopushDMSMSType === "sms_premium") ? "FR" : "XXX",
"transactional": "1",
//octopush not supporting non ascii char
"sms_text": msg.replace(/[^\x00-\x7F]/g, ""),

@ -49,7 +49,7 @@ class Octopush extends NotificationProvider {
},
params: data
};
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config)
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config);
} else {
throw new Error("Unknown Octopush version!");
}
45
server/notification-providers/onebot.js
Normal file
@ -0,0 +1,45 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class OneBot extends NotificationProvider {

name = "OneBot";

async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let httpAddr = notification.httpAddr;
if (!httpAddr.startsWith("http")) {
httpAddr = "http://" + httpAddr;
}
if (!httpAddr.endsWith("/")) {
httpAddr += "/";
}
let onebotAPIUrl = httpAddr + "send_msg";
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + notification.accessToken,
}
};
let pushText = "UptimeKuma Alert: " + msg;
let data = {
"auto_escape": true,
"message": pushText,
};
if (notification.msgType === "group") {
data["message_type"] = "group";
data["group_id"] = notification.recieverId;
} else {
data["message_type"] = "private";
data["user_id"] = notification.recieverId;
}
await axios.post(onebotAPIUrl, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}

module.exports = OneBot;
@ -12,7 +12,7 @@ class PromoSMS extends NotificationProvider {
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Basic " + Buffer.from(notification.promosmsLogin + ":" + notification.promosmsPassword).toString('base64'),
"Authorization": "Basic " + Buffer.from(notification.promosmsLogin + ":" + notification.promosmsPassword).toString("base64"),
"Accept": "text/json",
}
};

@ -30,7 +30,7 @@ class PromoSMS extends NotificationProvider {
let error = "Something gone wrong. Api returned " + resp.data.response.status + ".";
this.throwGeneralAxiosError(error);
}

return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
@ -23,26 +23,26 @@ class Pushbullet extends NotificationProvider {
"type": "note",
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
}
await axios.post(pushbulletUrl, testdata, config)
} else if (heartbeatJSON["status"] == DOWN) {
};
await axios.post(pushbulletUrl, testdata, config);
} else if (heartbeatJSON["status"] === DOWN) {
let downdata = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(pushbulletUrl, downdata, config)
} else if (heartbeatJSON["status"] == UP) {
};
await axios.post(pushbulletUrl, downdata, config);
} else if (heartbeatJSON["status"] === UP) {
let updata = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(pushbulletUrl, updata, config)
};
await axios.post(pushbulletUrl, updata, config);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}
52
server/notification-providers/pushdeer.js
Normal file
@ -0,0 +1,52 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class PushDeer extends NotificationProvider {

name = "PushDeer";

async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let pushdeerlink = "https://api2.pushdeer.com/message/push";

let valid = msg != null && monitorJSON != null && heartbeatJSON != null;

let title;
if (valid && heartbeatJSON.status === UP) {
title = "## Uptime Kuma: " + monitorJSON.name + " up";
} else if (valid && heartbeatJSON.status === DOWN) {
title = "## Uptime Kuma: " + monitorJSON.name + " down";
} else {
title = "## Uptime Kuma Message";
}

let data = {
"pushkey": notification.pushdeerKey,
"text": title,
"desp": msg.replace(/\n/g, "\n\n"),
"type": "markdown",
};

try {
let res = await axios.post(pushdeerlink, data);

if ("error" in res.data) {
let error = res.data.error;
this.throwGeneralAxiosError(error);
}
if (res.data.content.result.length === 0) {
let error = "Invalid PushDeer key";
this.throwGeneralAxiosError(error);
} else if (JSON.parse(res.data.content.result[0]).success !== "ok") {
let error = "Unknown error";
this.throwGeneralAxiosError(error);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}

module.exports = PushDeer;
@ -9,36 +9,31 @@ class Pushover extends NotificationProvider {
let okMsg = "Sent Successfully.";
let pushoverlink = "https://api.pushover.net/1/messages.json";

let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg,
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};

if (notification.pushoverdevice) {
data.device = notification.pushoverdevice;
}

try {
if (heartbeatJSON == null) {
let data = {
"message": msg,
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};
await axios.post(pushoverlink, data);
return okMsg;
} else {
data.message += "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"];
await axios.post(pushoverlink, data);
return okMsg;
}

let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg + "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"],
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};
await axios.post(pushoverlink, data);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
@ -19,10 +19,10 @@ class Pushy extends NotificationProvider {
"badge": 1,
"sound": "ping.aiff"
}
})
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}
@ -2,7 +2,7 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const Slack = require("./slack");
const { setting } = require("../util-server");
const { getMonitorRelativeURL, UP, DOWN } = require("../../src/util");
const { getMonitorRelativeURL, DOWN } = require("../../src/util");

class RocketChat extends NotificationProvider {
@ -16,10 +16,10 @@ class Signal extends NotificationProvider {
};
let config = {};

await axios.post(notification.signalURL, data, config)
await axios.post(notification.signalURL, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}
@ -1,6 +1,6 @@
const nodemailer = require("nodemailer");
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const { DOWN } = require("../../src/util");

class SMTP extends NotificationProvider {
23
server/notification-providers/techulus-push.js
Normal file
@ -0,0 +1,23 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class TechulusPush extends NotificationProvider {

name = "PushByTechulus";

async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";

try {
await axios.post(`https://push.techulus.com/api/v1/notify/${notification.pushAPIKey}`, {
"title": "Uptime-Kuma",
"body": msg,
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}

module.exports = TechulusPush;
@ -14,12 +14,12 @@ class Telegram extends NotificationProvider {
chat_id: notification.telegramChatID,
text: msg,
},
})
});
return okMsg;

} catch (error) {
let msg = (error.response.data.description) ? error.response.data.description : "Error without description"
throw new Error(msg)
let msg = (error.response.data.description) ? error.response.data.description : "Error without description";
throw new Error(msg);
}
}
}
@ -24,17 +24,17 @@ class Webhook extends NotificationProvider {

config = {
headers: finalData.getHeaders(),
}
};

} else {
finalData = data;
}

await axios.post(notification.webhookURL, finalData, config)
await axios.post(notification.webhookURL, finalData, config);
return okMsg;

} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}

}
@ -32,10 +32,10 @@ class WeCom extends NotificationProvider {
*/
composeMessage(heartbeatJSON, msg) {
let title;
if (msg != null && heartbeatJSON != null && heartbeatJSON['status'] == UP) {
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === UP) {
title = "UptimeKuma Monitor Up";
}
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] == DOWN) {
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === DOWN) {
title = "UptimeKuma Monitor Down";
}
if (msg != null) {
@ -12,6 +12,7 @@ const ClickSendSMS = require("./notification-providers/clicksendsms");
const Pushbullet = require("./notification-providers/pushbullet");
const Pushover = require("./notification-providers/pushover");
const Pushy = require("./notification-providers/pushy");
const TechulusPush = require("./notification-providers/techulus-push");
const RocketChat = require("./notification-providers/rocket-chat");
const Signal = require("./notification-providers/signal");
const Slack = require("./notification-providers/slack");

@ -23,10 +24,15 @@ const Feishu = require("./notification-providers/feishu");
const AliyunSms = require("./notification-providers/aliyun-sms");
const DingDing = require("./notification-providers/dingding");
const Bark = require("./notification-providers/bark");
const { log } = require("../src/util");
const SerwerSMS = require("./notification-providers/serwersms");
const Stackfield = require("./notification-providers/stackfield");
const WeCom = require("./notification-providers/wecom");
const GoogleChat = require("./notification-providers/google-chat");
const Gorush = require("./notification-providers/gorush");
const Alerta = require("./notification-providers/alerta");
const OneBot = require("./notification-providers/onebot");
const PushDeer = require("./notification-providers/pushdeer");

class Notification {

@ -34,7 +40,7 @@ class Notification {

/** Initialize the notification providers */
static init() {
console.log("Prepare Notification Providers");
log.info("notification", "Prepare Notification Providers");

this.providerList = {};

@ -56,6 +62,7 @@ class Notification {
new Pushbullet(),
new Pushover(),
new Pushy(),
new TechulusPush(),
new RocketChat(),
new Signal(),
new Slack(),

@ -66,7 +73,11 @@ class Notification {
new SerwerSMS(),
new Stackfield(),
new WeCom(),
new GoogleChat()
new GoogleChat(),
new Gorush(),
new Alerta(),
new OneBot(),
new PushDeer(),
];

for (let item of list) {
@ -31,7 +31,7 @@ exports.verify = function (password, hash) {
* @returns {boolean}
*/
function isSHA1(hash) {
return (typeof hash === "string" && hash.startsWith("sha1"))
return (typeof hash === "string" && hash.startsWith("sha1"));
}

/**
@ -11,7 +11,8 @@ module.exports = Ping;
/**
* Constructor for ping class
* @param {string} host Host to ping
* @param {string} options Command line options for ping
* @param {object} [options] Options for the ping command
* @param {array|string} [options.args] - Arguments to pass to the ping command
*/
function Ping(host, options) {
if (!host) {

@ -139,6 +140,11 @@ Ping.prototype.send = function (callback) {
}
});

/**
* @param {Function} callback
*
* Generated by Trelent
*/
function onEnd() {
let stdout = this.stdout._stdout;
let stderr = this.stderr._stderr;
@ -1,4 +1,5 @@
|
|||
const PrometheusClient = require("prom-client");
|
||||
const { log } = require("../src/util");
|
||||
|
||||
const commonLabels = [
|
||||
"monitor_name",
|
||||
|
@ -8,24 +9,24 @@ const commonLabels = [
|
|||
"monitor_port",
|
||||
];
|
||||
|
||||
const monitor_cert_days_remaining = new PrometheusClient.Gauge({
|
||||
const monitorCertDaysRemaining = new PrometheusClient.Gauge({
|
||||
name: "monitor_cert_days_remaining",
|
||||
help: "The number of days remaining until the certificate expires",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
const monitor_cert_is_valid = new PrometheusClient.Gauge({
|
||||
const monitorCertIsValid = new PrometheusClient.Gauge({
|
||||
name: "monitor_cert_is_valid",
|
||||
help: "Is the certificate still valid? (1 = Yes, 0= No)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
const monitor_response_time = new PrometheusClient.Gauge({
|
||||
const monitorResponseTime = new PrometheusClient.Gauge({
|
||||
name: "monitor_response_time",
|
||||
help: "Monitor Response Time (ms)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
const monitor_status = new PrometheusClient.Gauge({
|
||||
const monitorStatus = new PrometheusClient.Gauge({
|
||||
name: "monitor_status",
|
||||
help: "Monitor Status (1 = UP, 0= DOWN)",
|
||||
labelNames: commonLabels
|
||||
|
@ -56,44 +57,58 @@ class Prometheus {
|
|||
|
||||
if (typeof tlsInfo !== "undefined") {
|
||||
try {
|
||||
let is_valid = 0;
|
||||
if (tlsInfo.valid == true) {
|
||||
is_valid = 1;
|
||||
let isValid;
|
||||
if (tlsInfo.valid === true) {
|
||||
isValid = 1;
|
||||
} else {
|
||||
is_valid = 0;
|
||||
isValid = 0;
|
||||
}
|
||||
monitor_cert_is_valid.set(this.monitorLabelValues, is_valid);
|
||||
monitorCertIsValid.set(this.monitorLabelValues, isValid);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
|
||||
try {
|
||||
if (tlsInfo.certInfo != null) {
|
||||
monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.certInfo.daysRemaining);
|
||||
monitorCertDaysRemaining.set(this.monitorLabelValues, tlsInfo.certInfo.daysRemaining);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
monitor_status.set(this.monitorLabelValues, heartbeat.status);
|
||||
monitorStatus.set(this.monitorLabelValues, heartbeat.status);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof heartbeat.ping === "number") {
|
||||
monitor_response_time.set(this.monitorLabelValues, heartbeat.ping);
|
||||
monitorResponseTime.set(this.monitorLabelValues, heartbeat.ping);
|
||||
} else {
|
||||
// Is it good?
|
||||
monitor_response_time.set(this.monitorLabelValues, -1);
|
||||
monitorResponseTime.set(this.monitorLabelValues, -1);
|
||||
}
|
||||
} catch (e) {
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
}
|
||||
|
||||
remove() {
|
||||
try {
|
||||
monitorCertDaysRemaining.remove(this.monitorLabelValues);
|
||||
monitorCertIsValid.remove(this.monitorLabelValues);
|
||||
monitorResponseTime.remove(this.monitorLabelValues);
|
||||
monitorStatus.remove(this.monitorLabelValues);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
|
|
189
server/proxy.js
Normal file
|
@ -0,0 +1,189 @@
|
|||
const { R } = require("redbean-node");
|
||||
const HttpProxyAgent = require("http-proxy-agent");
|
||||
const HttpsProxyAgent = require("https-proxy-agent");
|
||||
const SocksProxyAgent = require("socks-proxy-agent");
|
||||
const { debug } = require("../src/util");
|
||||
const { UptimeKumaServer } = require("./uptime-kuma-server");
|
||||
|
||||
class Proxy {
|
||||
|
||||
static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks4" ]
|
||||
|
||||
/**
|
||||
* Saves and updates given proxy entity
|
||||
*
|
||||
* @param proxy
|
||||
* @param proxyID
|
||||
* @param userID
|
||||
* @return {Promise<Bean>}
|
||||
*/
|
||||
static async save(proxy, proxyID, userID) {
|
||||
let bean;
|
||||
|
||||
if (proxyID) {
|
||||
bean = await R.findOne("proxy", " id = ? AND user_id = ? ", [ proxyID, userID ]);
|
||||
|
||||
if (!bean) {
|
||||
throw new Error("proxy not found");
|
||||
}
|
||||
|
||||
} else {
|
||||
bean = R.dispense("proxy");
|
||||
}
|
||||
|
||||
// Make sure given proxy protocol is supported
|
||||
if (!this.SUPPORTED_PROXY_PROTOCOLS.includes(proxy.protocol)) {
|
||||
throw new Error(`
|
||||
Unsupported proxy protocol "${proxy.protocol}.
|
||||
Supported protocols are ${this.SUPPORTED_PROXY_PROTOCOLS.join(", ")}."`
|
||||
);
|
||||
}
|
||||
|
||||
// When proxy is default update deactivate old default proxy
|
||||
if (proxy.default) {
|
||||
await R.exec("UPDATE proxy SET `default` = 0 WHERE `default` = 1");
|
||||
}
|
||||
|
||||
bean.user_id = userID;
|
||||
bean.protocol = proxy.protocol;
|
||||
bean.host = proxy.host;
|
||||
bean.port = proxy.port;
|
||||
bean.auth = proxy.auth;
|
||||
bean.username = proxy.username;
|
||||
bean.password = proxy.password;
|
||||
bean.active = proxy.active || true;
|
||||
bean.default = proxy.default || false;
|
||||
|
||||
await R.store(bean);
|
||||
|
||||
if (proxy.applyExisting) {
|
||||
await applyProxyEveryMonitor(bean.id, userID);
|
||||
}
|
||||
|
||||
return bean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes proxy with given id and removes it from monitors
|
||||
*
|
||||
* @param proxyID
|
||||
* @param userID
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
static async delete(proxyID, userID) {
|
||||
const bean = await R.findOne("proxy", " id = ? AND user_id = ? ", [ proxyID, userID ]);
|
||||
|
||||
if (!bean) {
|
||||
throw new Error("proxy not found");
|
||||
}
|
||||
|
||||
// Delete removed proxy from monitors if exists
|
||||
await R.exec("UPDATE monitor SET proxy_id = null WHERE proxy_id = ?", [ proxyID ]);
|
||||
|
||||
// Delete proxy from list
|
||||
await R.trash(bean);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create HTTP and HTTPS agents related with given proxy bean object
|
||||
*
|
||||
* @param proxy proxy bean object
|
||||
* @param options http and https agent options
|
||||
* @return {{httpAgent: Agent, httpsAgent: Agent}}
|
||||
*/
|
||||
static createAgents(proxy, options) {
|
||||
const { httpAgentOptions, httpsAgentOptions } = options || {};
|
||||
let agent;
|
||||
let httpAgent;
|
||||
let httpsAgent;
|
||||
|
||||
const proxyOptions = {
|
||||
protocol: proxy.protocol,
|
||||
host: proxy.host,
|
||||
port: proxy.port,
|
||||
};
|
||||
|
||||
if (proxy.auth) {
|
||||
proxyOptions.auth = `${proxy.username}:${proxy.password}`;
|
||||
}
|
||||
|
||||
debug(`Proxy Options: ${JSON.stringify(proxyOptions)}`);
|
||||
debug(`HTTP Agent Options: ${JSON.stringify(httpAgentOptions)}`);
|
||||
debug(`HTTPS Agent Options: ${JSON.stringify(httpsAgentOptions)}`);
|
||||
|
||||
switch (proxy.protocol) {
|
||||
case "http":
|
||||
case "https":
|
||||
httpAgent = new HttpProxyAgent({
|
||||
...httpAgentOptions || {},
|
||||
...proxyOptions
|
||||
});
|
||||
|
||||
httpsAgent = new HttpsProxyAgent({
|
||||
...httpsAgentOptions || {},
|
||||
...proxyOptions,
|
||||
});
|
||||
break;
|
||||
case "socks":
|
||||
case "socks5":
|
||||
case "socks4":
|
||||
agent = new SocksProxyAgent({
|
||||
...httpAgentOptions,
|
||||
...httpsAgentOptions,
|
||||
...proxyOptions,
|
||||
});
|
||||
|
||||
httpAgent = agent;
|
||||
httpsAgent = agent;
|
||||
break;
|
||||
|
||||
default: throw new Error(`Unsupported proxy protocol provided. ${proxy.protocol}`);
|
||||
}
|
||||
|
||||
return {
|
||||
httpAgent,
|
||||
httpsAgent
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Reload proxy settings for current monitors
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async reloadProxy() {
|
||||
const server = UptimeKumaServer.getInstance();
|
||||
|
||||
let updatedList = await R.getAssoc("SELECT id, proxy_id FROM monitor");
|
||||
|
||||
for (let monitorID in server.monitorList) {
|
||||
let monitor = server.monitorList[monitorID];
|
||||
|
||||
if (updatedList[monitorID]) {
|
||||
monitor.proxy_id = updatedList[monitorID].proxy_id;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies given proxy id to monitors
|
||||
*
|
||||
* @param proxyID
|
||||
* @param userID
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async function applyProxyEveryMonitor(proxyID, userID) {
|
||||
// Find all monitors with id and proxy id
|
||||
const monitors = await R.getAll("SELECT id, proxy_id FROM monitor WHERE user_id = ?", [ userID ]);
|
||||
|
||||
// Update proxy id not match with given proxy id
|
||||
for (const monitor of monitors) {
|
||||
if (monitor.proxy_id !== proxyID) {
|
||||
await R.exec("UPDATE monitor SET proxy_id = ? WHERE id = ?", [ proxyID, monitor.id ]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Proxy,
|
||||
};
|
|
@ -1,5 +1,5 @@
|
|||
const { RateLimiter } = require("limiter");
|
||||
const { debug } = require("../src/util");
|
||||
const { log } = require("../src/util");
|
||||
|
||||
class KumaRateLimiter {
|
||||
/**
|
||||
|
@ -24,7 +24,7 @@ class KumaRateLimiter {
|
|||
*/
|
||||
async pass(callback, num = 1) {
|
||||
const remainingRequests = await this.removeTokens(num);
|
||||
debug("Rate Limit (remainingRequests):" + remainingRequests);
|
||||
log.info("rate-limit", "remaining requests: " + remainingRequests);
|
||||
if (remainingRequests < 0) {
|
||||
if (callback) {
|
||||
callback({
|
||||
|
@ -54,6 +54,14 @@ const loginRateLimiter = new KumaRateLimiter({
|
|||
errorMessage: "Too frequently, try again later."
|
||||
});
|
||||
|
||||
const twoFaRateLimiter = new KumaRateLimiter({
|
||||
tokensPerInterval: 30,
|
||||
interval: "minute",
|
||||
fireImmediately: true,
|
||||
errorMessage: "Too frequently, try again later."
|
||||
});
|
||||
|
||||
module.exports = {
|
||||
loginRateLimiter
|
||||
loginRateLimiter,
|
||||
twoFaRateLimiter,
|
||||
};
|
||||
|
|
|
@ -1,19 +1,31 @@
|
|||
let express = require("express");
|
||||
const { allowDevAllOrigin, getSettings, setting } = require("../util-server");
|
||||
const { allowDevAllOrigin } = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const server = require("../server");
|
||||
const apicache = require("../modules/apicache");
|
||||
const Monitor = require("../model/monitor");
|
||||
const dayjs = require("dayjs");
|
||||
const { UP, flipStatus, debug } = require("../../src/util");
|
||||
const { UP, flipStatus, log } = require("../../src/util");
|
||||
const StatusPage = require("../model/status_page");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
let router = express.Router();
|
||||
|
||||
let cache = apicache.middleware;
|
||||
const server = UptimeKumaServer.getInstance();
|
||||
let io = server.io;
|
||||
|
||||
router.get("/api/entry-page", async (_, response) => {
|
||||
router.get("/api/entry-page", async (request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
response.json(server.entryPage);
|
||||
|
||||
let result = { };
|
||||
|
||||
if (request.hostname in StatusPage.domainMappingList) {
|
||||
result.type = "statusPageMatchedDomain";
|
||||
result.statusPageSlug = StatusPage.domainMappingList[request.hostname];
|
||||
} else {
|
||||
result.type = "entryPage";
|
||||
result.entryPage = server.entryPage;
|
||||
}
|
||||
response.json(result);
|
||||
});
|
||||
|
||||
router.get("/api/push/:pushToken", async (request, response) => {
|
||||
|
@ -51,8 +63,8 @@ router.get("/api/push/:pushToken", async (request, response) => {
|
|||
duration = dayjs(bean.time).diff(dayjs(previousHeartbeat.time), "second");
|
||||
}
|
||||
|
||||
debug("PreviousStatus: " + previousStatus);
|
||||
debug("Current Status: " + status);
|
||||
log.debug("router", "PreviousStatus: " + previousStatus);
|
||||
log.debug("router", "Current Status: " + status);
|
||||
|
||||
bean.important = Monitor.isImportantBeat(isFirstBeat, previousStatus, status);
|
||||
bean.monitor_id = monitor.id;
|
||||
|
@ -82,110 +94,80 @@ router.get("/api/push/:pushToken", async (request, response) => {
|
|||
}
|
||||
});
|
||||
|
||||
// Status Page Config
|
||||
router.get("/api/status-page/config", async (_request, response) => {
|
||||
// Status page config, incident, monitor list
|
||||
router.get("/api/status-page/:slug", cache("5 minutes"), async (request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
let slug = request.params.slug;
|
||||
|
||||
let config = await getSettings("statusPage");
|
||||
// Get Status Page
|
||||
let statusPage = await R.findOne("status_page", " slug = ? ", [
|
||||
slug
|
||||
]);
|
||||
|
||||
if (! config.statusPageTheme) {
|
||||
config.statusPageTheme = "light";
|
||||
if (!statusPage) {
|
||||
response.statusCode = 404;
|
||||
response.json({
|
||||
msg: "Not Found"
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (! config.statusPagePublished) {
|
||||
config.statusPagePublished = true;
|
||||
}
|
||||
|
||||
if (! config.statusPageTags) {
|
||||
config.statusPageTags = false;
|
||||
}
|
||||
|
||||
if (! config.title) {
|
||||
config.title = "Uptime Kuma";
|
||||
}
|
||||
|
||||
response.json(config);
|
||||
});
|
||||
|
||||
// Status Page - Get the current Incident
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/incident", async (_, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
|
||||
let incident = await R.findOne("incident", " pin = 1 AND active = 1");
|
||||
// Incident
|
||||
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [
|
||||
statusPage.id,
|
||||
]);
|
||||
|
||||
if (incident) {
|
||||
incident = incident.toPublicJSON();
|
||||
}
|
||||
|
||||
// Public Group List
|
||||
const publicGroupList = [];
|
||||
const showTags = !!statusPage.show_tags;
|
||||
|
||||
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [
|
||||
statusPage.id
|
||||
]);
|
||||
|
||||
for (let groupBean of list) {
|
||||
let monitorGroup = await groupBean.toPublicJSON(showTags);
|
||||
publicGroupList.push(monitorGroup);
|
||||
}
|
||||
|
||||
// Response
|
||||
response.json({
|
||||
ok: true,
|
||||
config: await statusPage.toPublicJSON(),
|
||||
incident,
|
||||
publicGroupList
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
send403(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Status Page - Monitor List
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/monitor-list", cache("5 minutes"), async (_request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
const publicGroupList = [];
|
||||
const tagsVisible = (await getSettings("statusPage")).statusPageTags;
|
||||
const list = await R.find("group", " public = 1 ORDER BY weight ");
|
||||
for (let groupBean of list) {
|
||||
let monitorGroup = await groupBean.toPublicJSON();
|
||||
if (tagsVisible) {
|
||||
monitorGroup.monitorList = await Promise.all(monitorGroup.monitorList.map(async (monitor) => {
|
||||
// Includes tags as an array in response, allows for tags to be displayed on public status page
|
||||
const tags = await R.getAll(
|
||||
`SELECT monitor_tag.monitor_id, monitor_tag.value, tag.name, tag.color
|
||||
FROM monitor_tag
|
||||
JOIN tag
|
||||
ON monitor_tag.tag_id = tag.id
|
||||
WHERE monitor_tag.monitor_id = ?`, [monitor.id]
|
||||
);
|
||||
return {
|
||||
...monitor,
|
||||
tags: tags
|
||||
};
|
||||
}));
|
||||
}
|
||||
|
||||
publicGroupList.push(monitorGroup);
|
||||
}
|
||||
|
||||
response.json(publicGroupList);
|
||||
|
||||
} catch (error) {
|
||||
send403(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Status Page Polling Data
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/heartbeat", cache("5 minutes"), async (_request, response) => {
|
||||
router.get("/api/status-page/heartbeat/:slug", cache("1 minutes"), async (request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
|
||||
let heartbeatList = {};
|
||||
let uptimeList = {};
|
||||
|
||||
let slug = request.params.slug;
|
||||
let statusPageID = await StatusPage.slugToID(slug);
|
||||
|
||||
let monitorIDList = await R.getCol(`
|
||||
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
|
||||
WHERE monitor_group.group_id = \`group\`.id
|
||||
AND public = 1
|
||||
`);
|
||||
AND \`group\`.status_page_id = ?
|
||||
`, [
|
||||
statusPageID
|
||||
]);
|
||||
|
||||
for (let monitorID of monitorIDList) {
|
||||
let list = await R.getAll(`
|
||||
|
@ -214,24 +196,6 @@ router.get("/api/status-page/heartbeat", cache("5 minutes"), async (_request, re
|
|||
}
|
||||
});
|
||||
|
||||
async function checkPublished() {
|
||||
if (! await isPublished()) {
|
||||
throw new Error("The status page is not published");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Default is published
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
async function isPublished() {
|
||||
const value = await setting("statusPagePublished");
|
||||
if (value === null) {
|
||||
return true;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function send403(res, msg = "") {
|
||||
res.status(403).json({
|
||||
"status": "fail",
|
||||
|
|
500
server/server.js
File diff suppressed because it is too large
93
server/socket-handlers/cloudflared-socket-handler.js
Normal file
|
@ -0,0 +1,93 @@
|
|||
const { checkLogin, setSetting, setting, doubleCheckPassword } = require("../util-server");
|
||||
const { CloudflaredTunnel } = require("node-cloudflared-tunnel");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
const io = UptimeKumaServer.getInstance().io;
|
||||
|
||||
const prefix = "cloudflared_";
|
||||
const cloudflared = new CloudflaredTunnel();
|
||||
|
||||
cloudflared.change = (running, message) => {
|
||||
io.to("cloudflared").emit(prefix + "running", running);
|
||||
io.to("cloudflared").emit(prefix + "message", message);
|
||||
};
|
||||
|
||||
cloudflared.error = (errorMessage) => {
|
||||
io.to("cloudflared").emit(prefix + "errorMessage", errorMessage);
|
||||
};
|
||||
|
||||
module.exports.cloudflaredSocketHandler = (socket) => {
|
||||
|
||||
socket.on(prefix + "join", async () => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
socket.join("cloudflared");
|
||||
io.to(socket.userID).emit(prefix + "installed", cloudflared.checkInstalled());
|
||||
io.to(socket.userID).emit(prefix + "running", cloudflared.running);
|
||||
io.to(socket.userID).emit(prefix + "token", await setting("cloudflaredTunnelToken"));
|
||||
} catch (error) { }
|
||||
});
|
||||
|
||||
socket.on(prefix + "leave", async () => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
socket.leave("cloudflared");
|
||||
} catch (error) { }
|
||||
});
|
||||
|
||||
socket.on(prefix + "start", async (token) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
if (token && typeof token === "string") {
|
||||
await setSetting("cloudflaredTunnelToken", token);
|
||||
cloudflared.token = token;
|
||||
} else {
|
||||
cloudflared.token = null;
|
||||
}
|
||||
cloudflared.start();
|
||||
} catch (error) { }
|
||||
});
|
||||
|
||||
socket.on(prefix + "stop", async (currentPassword, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
await doubleCheckPassword(socket, currentPassword);
|
||||
cloudflared.stop();
|
||||
} catch (error) {
|
||||
callback({
|
||||
ok: false,
|
||||
msg: error.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
socket.on(prefix + "removeToken", async () => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
await setSetting("cloudflaredTunnelToken", "");
|
||||
} catch (error) { }
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
module.exports.autoStart = async (token) => {
|
||||
if (!token) {
|
||||
token = await setting("cloudflaredTunnelToken");
|
||||
} else {
|
||||
// Override the current token via args or env var
|
||||
await setSetting("cloudflaredTunnelToken", token);
|
||||
console.log("Use cloudflared token from args or env var");
|
||||
}
|
||||
|
||||
if (token) {
|
||||
console.log("Start cloudflared");
|
||||
cloudflared.token = token;
|
||||
cloudflared.start();
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.stop = async () => {
|
||||
console.log("Stop cloudflared");
|
||||
if (cloudflared) {
|
||||
cloudflared.stop();
|
||||
}
|
||||
};
|
54
server/socket-handlers/proxy-socket-handler.js
Normal file
|
@ -0,0 +1,54 @@
|
|||
const { checkLogin } = require("../util-server");
|
||||
const { Proxy } = require("../proxy");
|
||||
const { sendProxyList } = require("../client");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
const server = UptimeKumaServer.getInstance();
|
||||
|
||||
module.exports.proxySocketHandler = (socket) => {
|
||||
socket.on("addProxy", async (proxy, proxyID, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
const proxyBean = await Proxy.save(proxy, proxyID, socket.userID);
|
||||
await sendProxyList(socket);
|
||||
|
||||
if (proxy.applyExisting) {
|
||||
await Proxy.reloadProxy();
|
||||
await server.sendMonitorList(socket);
|
||||
}
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
msg: "Saved",
|
||||
id: proxyBean.id,
|
||||
});
|
||||
|
||||
} catch (e) {
|
||||
callback({
|
||||
ok: false,
|
||||
msg: e.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("deleteProxy", async (proxyID, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
await Proxy.delete(proxyID, socket.userID);
|
||||
await sendProxyList(socket);
|
||||
await Proxy.reloadProxy();
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
msg: "Deleted",
|
||||
});
|
||||
|
||||
} catch (e) {
|
||||
callback({
|
||||
ok: false,
|
||||
msg: e.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
|
@ -1,25 +1,36 @@
|
|||
const { R } = require("redbean-node");
|
||||
const { checkLogin, setSettings } = require("../util-server");
|
||||
const { checkLogin, setSetting } = require("../util-server");
|
||||
const dayjs = require("dayjs");
|
||||
const { debug } = require("../../src/util");
|
||||
const { log } = require("../../src/util");
|
||||
const ImageDataURI = require("../image-data-uri");
|
||||
const Database = require("../database");
|
||||
const apicache = require("../modules/apicache");
|
||||
const StatusPage = require("../model/status_page");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
|
||||
module.exports.statusPageSocketHandler = (socket) => {
|
||||
|
||||
// Post or edit incident
|
||||
socket.on("postIncident", async (incident, callback) => {
|
||||
socket.on("postIncident", async (slug, incident, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
await R.exec("UPDATE incident SET pin = 0 ");
|
||||
let statusPageID = await StatusPage.slugToID(slug);
|
||||
|
||||
if (!statusPageID) {
|
||||
throw new Error("slug is not found");
|
||||
}
|
||||
|
||||
await R.exec("UPDATE incident SET pin = 0 WHERE status_page_id = ? ", [
|
||||
statusPageID
|
||||
]);
|
||||
|
||||
let incidentBean;
|
||||
|
||||
if (incident.id) {
|
||||
incidentBean = await R.findOne("incident", " id = ?", [
|
||||
incident.id
|
||||
incidentBean = await R.findOne("incident", " id = ? AND status_page_id = ? ", [
|
||||
incident.id,
|
||||
statusPageID
|
||||
]);
|
||||
}
|
||||
|
||||
|
@ -31,6 +42,7 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
incidentBean.content = incident.content;
|
||||
incidentBean.style = incident.style;
|
||||
incidentBean.pin = true;
|
||||
incidentBean.status_page_id = statusPageID;
|
||||
|
||||
if (incident.id) {
|
||||
incidentBean.lastUpdatedDate = R.isoDateTime(dayjs.utc());
|
||||
|
@ -52,11 +64,15 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
}
|
||||
});
|
||||
|
||||
socket.on("unpinIncident", async (callback) => {
|
||||
socket.on("unpinIncident", async (slug, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
await R.exec("UPDATE incident SET pin = 0 WHERE pin = 1");
|
||||
let statusPageID = await StatusPage.slugToID(slug);
|
||||
|
||||
await R.exec("UPDATE incident SET pin = 0 WHERE pin = 1 AND status_page_id = ? ", [
|
||||
statusPageID
|
||||
]);
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
|
@ -69,14 +85,46 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
}
|
||||
});
|
||||
|
||||
// Save Status Page
|
||||
// imgDataUrl Only Accept PNG!
|
||||
socket.on("saveStatusPage", async (config, imgDataUrl, publicGroupList, callback) => {
|
||||
|
||||
socket.on("getStatusPage", async (slug, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
apicache.clear();
|
||||
let statusPage = await R.findOne("status_page", " slug = ? ", [
|
||||
slug
|
||||
]);
|
||||
|
||||
if (!statusPage) {
|
||||
throw new Error("No slug?");
|
||||
}
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
config: await statusPage.toJSON(),
|
||||
});
|
||||
} catch (error) {
|
||||
callback({
|
||||
ok: false,
|
||||
msg: error.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Save Status Page
|
||||
// imgDataUrl Only Accept PNG!
|
||||
socket.on("saveStatusPage", async (slug, config, imgDataUrl, publicGroupList, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
// Save Config
|
||||
let statusPage = await R.findOne("status_page", " slug = ? ", [
|
||||
slug
|
||||
]);
|
||||
|
||||
if (!statusPage) {
|
||||
throw new Error("No slug?");
|
||||
}
|
||||
|
||||
checkSlug(config.slug);
|
||||
|
||||
const header = "data:image/png;base64,";
|
||||
|
||||
|
@ -88,16 +136,34 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
throw new Error("Only allowed PNG logo.");
|
||||
}
|
||||
|
||||
const filename = `logo${statusPage.id}.png`;
|
||||
|
||||
// Convert to file
|
||||
await ImageDataURI.outputFile(imgDataUrl, Database.uploadDir + "logo.png");
|
||||
config.logo = "/upload/logo.png?t=" + Date.now();
|
||||
await ImageDataURI.outputFile(imgDataUrl, Database.uploadDir + filename);
|
||||
config.logo = `/upload/${filename}?t=` + Date.now();
|
||||
|
||||
} else {
|
||||
config.icon = imgDataUrl;
|
||||
}
|
||||
|
||||
// Save Config
|
||||
await setSettings("statusPage", config);
|
||||
statusPage.slug = config.slug;
|
||||
statusPage.title = config.title;
|
||||
statusPage.description = config.description;
|
||||
statusPage.icon = config.logo;
|
||||
statusPage.theme = config.theme;
|
||||
//statusPage.published = ;
|
||||
//statusPage.search_engine_index = ;
|
||||
statusPage.show_tags = config.showTags;
|
||||
//statusPage.password = null;
|
||||
statusPage.footer_text = config.footerText;
|
||||
statusPage.custom_css = config.customCSS;
|
||||
statusPage.show_powered_by = config.showPoweredBy;
|
||||
statusPage.modified_date = R.isoDateTime();
|
||||
|
||||
await R.store(statusPage);
|
||||
|
||||
await statusPage.updateDomainNameList(config.domainNameList);
|
||||
await StatusPage.loadDomainMappingList();
|
||||
|
||||
// Save Public Group List
|
||||
const groupIDList = [];
|
||||
|
@ -106,13 +172,15 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
for (let group of publicGroupList) {
|
||||
let groupBean;
|
||||
if (group.id) {
|
||||
groupBean = await R.findOne("group", " id = ? AND public = 1 ", [
|
||||
group.id
|
||||
groupBean = await R.findOne("group", " id = ? AND public = 1 AND status_page_id = ? ", [
|
||||
group.id,
|
||||
statusPage.id
|
||||
]);
|
||||
} else {
|
||||
groupBean = R.dispense("group");
|
||||
}
|
||||
|
||||
groupBean.status_page_id = statusPage.id;
|
||||
groupBean.name = group.name;
|
||||
groupBean.public = true;
|
||||
groupBean.weight = groupOrder++;
|
||||
|
@ -124,7 +192,6 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
]);
|
||||
|
||||
let monitorOrder = 1;
|
||||
console.log(group.monitorList);
|
||||
|
||||
for (let monitor of group.monitorList) {
|
||||
let relationBean = R.dispense("monitor_group");
|
||||
|
@ -138,10 +205,25 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
group.id = groupBean.id;
|
||||
}
|
||||
|
||||
// Delete groups that not in the list
|
||||
debug("Delete groups that not in the list");
|
||||
// Delete groups that are not in the list
|
||||
log.debug("socket", "Delete groups that are not in the list");
|
||||
const slots = groupIDList.map(() => "?").join(",");
|
||||
await R.exec(`DELETE FROM \`group\` WHERE id NOT IN (${slots})`, groupIDList);
|
||||
|
||||
const data = [
|
||||
...groupIDList,
|
||||
statusPage.id
|
||||
];
|
||||
await R.exec(`DELETE FROM \`group\` WHERE id NOT IN (${slots}) AND status_page_id = ?`, data);
|
||||
|
||||
const server = UptimeKumaServer.getInstance();
|
||||
|
||||
// Also change entry page to new slug if it is the default one, and slug is changed.
|
||||
if (server.entryPage === "statusPage-" + slug && statusPage.slug !== slug) {
|
||||
server.entryPage = "statusPage-" + statusPage.slug;
|
||||
await setSetting("entryPage", server.entryPage, "general");
|
||||
}
|
||||
|
||||
apicache.clear();
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
|
@ -149,7 +231,7 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
});
|
||||
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
log.error("socket", error);
|
||||
|
||||
callback({
|
||||
ok: false,
|
||||
|
@ -158,4 +240,117 @@ module.exports.statusPageSocketHandler = (socket) => {
|
|||
}
|
||||
});
|
||||
|
||||
// Add a new status page
|
||||
socket.on("addStatusPage", async (title, slug, callback) => {
|
||||
try {
|
||||
checkLogin(socket);
|
||||
|
||||
title = title?.trim();
|
||||
slug = slug?.trim();
|
||||
|
||||
// Check empty
|
||||
if (!title || !slug) {
|
||||
throw new Error("Please input all fields");
|
||||
}
|
||||
|
||||
// Make sure slug is string
|
||||
if (typeof slug !== "string") {
|
||||
throw new Error("Slug -Accept string only");
|
||||
}
|
||||
|
||||
// lower case only
|
||||
slug = slug.toLowerCase();
|
||||
|
||||
checkSlug(slug);
|
||||
|
||||
let statusPage = R.dispense("status_page");
|
||||
statusPage.slug = slug;
|
||||
statusPage.title = title;
|
||||
statusPage.theme = "light";
|
||||
statusPage.icon = "";
|
||||
await R.store(statusPage);
|
||||
|
||||
callback({
|
||||
ok: true,
|
||||
msg: "OK!"
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
callback({
|
||||
ok: false,
|
||||
msg: error.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Delete a status page
socket.on("deleteStatusPage", async (slug, callback) => {
const server = UptimeKumaServer.getInstance();

try {
checkLogin(socket);

let statusPageID = await StatusPage.slugToID(slug);

if (statusPageID) {

// Reset entry page if it is the default one.
if (server.entryPage === "statusPage-" + slug) {
server.entryPage = "dashboard";
await setSetting("entryPage", server.entryPage, "general");
}

// No need to delete records from `status_page_cname`, because it has cascade foreign key.
// But for incident & group, it is hard to add cascade foreign key during migration, so they have to be deleted manually.

// Delete incident
await R.exec("DELETE FROM incident WHERE status_page_id = ? ", [
statusPageID
]);

// Delete group
await R.exec("DELETE FROM `group` WHERE status_page_id = ? ", [
statusPageID
]);

// Delete status_page
await R.exec("DELETE FROM status_page WHERE id = ? ", [
statusPageID
]);

} else {
throw new Error("Status Page is not found");
}

callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
};

/**
* Check slug a-z, 0-9, - only
* Regex from: https://stackoverflow.com/questions/22454258/js-regex-string-validation-for-slug
*/
function checkSlug(slug) {
if (typeof slug !== "string") {
throw new Error("Slug must be string");
}

slug = slug.trim();

if (!slug) {
throw new Error("Slug cannot be empty");
}

if (!slug.match(/^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$/)) {
throw new Error("Invalid Slug");
}
}
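
A quick illustration of how checkSlug behaves with a few made-up inputs (it is module-private, so this would live in the same file):

// Illustrative inputs only.
for (const candidate of [ "my-status-page", "My Page!", "   " ]) {
    try {
        checkSlug(candidate);
        console.log(`"${candidate}" is a valid slug`);        // only "my-status-page" reaches here
    } catch (e) {
        console.log(`"${candidate}" rejected: ${e.message}`); // "Invalid Slug" / "Slug cannot be empty"
    }
}
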
90
server/uptime-kuma-server.js
Normal file

@ -0,0 +1,90 @@
const express = require("express");
const https = require("https");
const fs = require("fs");
const http = require("http");
const { Server } = require("socket.io");
const { R } = require("redbean-node");
const { log } = require("../src/util");

/**
* `module.exports` (alias: `server`) should be inside this class, in order to avoid circular dependency issue.
* @type {UptimeKumaServer}
*/
class UptimeKumaServer {

/**
*
* @type {UptimeKumaServer}
*/
static instance = null;

/**
* Main monitor list
* @type {{}}
*/
monitorList = {};
entryPage = "dashboard";
app = undefined;
httpServer = undefined;
io = undefined;

static getInstance(args) {
if (UptimeKumaServer.instance == null) {
UptimeKumaServer.instance = new UptimeKumaServer(args);
}
return UptimeKumaServer.instance;
}

constructor(args) {
// SSL
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;

log.info("server", "Creating express and socket.io instance");
this.app = express();

if (sslKey && sslCert) {
log.info("server", "Server Type: HTTPS");
this.httpServer = https.createServer({
key: fs.readFileSync(sslKey),
cert: fs.readFileSync(sslCert)
}, this.app);
} else {
log.info("server", "Server Type: HTTP");
this.httpServer = http.createServer(this.app);
}

this.io = new Server(this.httpServer);
}

async sendMonitorList(socket) {
let list = await this.getMonitorJSONList(socket.userID);
this.io.to(socket.userID).emit("monitorList", list);
return list;
}

/**
* Get a list of monitors for the given user.
* @param {string} userID - The ID of the user to get monitors for.
* @returns {Promise<Object>} A promise that resolves to an object with monitor IDs as keys and monitor objects as values.
*
* Generated by Trelent
*/
async getMonitorJSONList(userID) {
let result = {};

let monitorList = await R.find("monitor", " user_id = ? ORDER BY weight DESC, name", [
userID,
]);

for (let monitor of monitorList) {
result[monitor.id] = await monitor.toJSON();
}

return result;
}
}

module.exports = {
UptimeKumaServer
};
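
The new class above is a lazy singleton: the first getInstance() call constructs the server, later calls return the same object. A hedged usage sketch; the require path and the empty args object are assumptions:

// Hypothetical consumer of the singleton defined in server/uptime-kuma-server.js.
const { UptimeKumaServer } = require("./uptime-kuma-server"); // path assumed

const server = UptimeKumaServer.getInstance({});   // first call constructs (args may carry "ssl-key"/"ssl-cert")
const sameServer = UptimeKumaServer.getInstance(); // later calls reuse the instance

console.log(server === sameServer); // true
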
@ -1,15 +1,15 @@
const tcpp = require("tcp-ping");
const Ping = require("./ping-lite");
const { R } = require("redbean-node");
const { debug } = require("../src/util");
const { log, genSecret } = require("../src/util");
const passwordHash = require("./password-hash");
const dayjs = require("dayjs");
const { Resolver } = require("dns");
const child_process = require("child_process");
const childProcess = require("child_process");
const iconv = require("iconv-lite");
const chardet = require("chardet");
const fs = require("fs");
const nodeJsUtil = require("util");
const mqtt = require("mqtt");

// From ping-lite
exports.WIN = /^win/.test(process.platform);

@ -27,12 +27,12 @@ exports.initJWTSecret = async () => {
"jwtSecret",
]);

if (! jwtSecretBean) {
if (!jwtSecretBean) {
jwtSecretBean = R.dispense("setting");
jwtSecretBean.key = "jwtSecret";
}

jwtSecretBean.value = passwordHash.generate(dayjs() + "");
jwtSecretBean.value = passwordHash.generate(genSecret());
await R.store(jwtSecretBean);
return jwtSecretBean;
};

@ -106,19 +106,79 @@ exports.pingAsync = function (hostname, ipv6 = false) {
});
};

// `string[]`, `Object[]` and `Object`.
/**
* MQTT Monitor
* TODO: Add docs for MQTT monitor
*/
exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
return new Promise((resolve, reject) => {
const { port, username, password, interval = 20 } = options;

// Adds MQTT protocol to the hostname if not already present
if (!/^(?:http|mqtt)s?:\/\//.test(hostname)) {
hostname = "mqtt://" + hostname;
}

const timeoutID = setTimeout(() => {
log.debug("mqtt", "MQTT timeout triggered");
client.end();
reject(new Error("Timeout"));
}, interval * 1000 * 0.8);

log.debug("mqtt", "MQTT connecting");

let client = mqtt.connect(hostname, {
port,
username,
password
});

client.on("connect", () => {
log.debug("mqtt", "MQTT connected");

try {
log.debug("mqtt", "MQTT subscribe topic");
client.subscribe(topic);
} catch (e) {
client.end();
clearTimeout(timeoutID);
reject(new Error("Cannot subscribe topic"));
}
});

client.on("error", (error) => {
client.end();
clearTimeout(timeoutID);
reject(error);
});

client.on("message", (messageTopic, message) => {
if (messageTopic == topic) {
client.end();
clearTimeout(timeoutID);
if (okMessage != null && okMessage !== "" && message.toString() !== okMessage) {
reject(new Error(`Message Mismatch - Topic: ${messageTopic}; Message: ${message.toString()}`));
} else {
resolve(`Topic: ${messageTopic}; Message: ${message.toString()}`);
}
}
});

});
};

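A rough idea of how the new mqttAsync helper could be awaited by a monitor check; the broker, topic, credentials and require path are placeholders, not values from this commit:

// Hypothetical caller of the mqttAsync helper added above.
const { mqttAsync } = require("./util-server"); // path assumed

async function checkMqtt() {
    try {
        const msg = await mqttAsync("broker.example.com", "uptime/heartbeat", "OK", {
            port: 1883,
            username: "monitor",
            password: "secret",
            interval: 20, // seconds; the helper rejects after 80% of this if no matching message arrives
        });
        console.log("MQTT check passed:", msg); // resolves with "Topic: ...; Message: ..."
    } catch (e) {
        console.error("MQTT check failed:", e.message);
    }
}

checkMqtt();
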
/**
* Resolves a given record using the specified DNS server
* @param {string} hostname The hostname of the record to lookup
* @param {string} resolver_server The DNS server to use
* @param {string} resolverServer The DNS server to use
* @param {string} rrtype The type of record to request
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.dnsResolve = function (hostname, resolver_server, rrtype) {
exports.dnsResolve = function (hostname, resolverServer, rrtype) {
const resolver = new Resolver();
resolver.setServers([resolver_server]);
resolver.setServers([ resolverServer ]);
return new Promise((resolve, reject) => {
if (rrtype == "PTR") {
if (rrtype === "PTR") {
resolver.reverse(hostname, (err, records) => {
if (err) {
reject(err);

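For reference, a hedged example of calling the renamed helper through the PTR branch shown above; the addresses and require path are placeholders:

// Hypothetical reverse lookup using dnsResolve with the renamed resolverServer parameter.
const { dnsResolve } = require("./util-server"); // path assumed

dnsResolve("8.8.8.8", "1.1.1.1", "PTR")
    .then((records) => console.log("PTR records:", records))
    .catch((err) => console.error("DNS lookup failed:", err.message));
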
@ -150,7 +210,7 @@ exports.setting = async function (key) {

try {
const v = JSON.parse(value);
debug(`Get Setting: ${key}: ${v}`);
log.debug("util", `Get Setting: ${key}: ${v}`);
return v;
} catch (e) {
return value;

@ -269,7 +329,7 @@ const parseCertificateInfo = function (info) {
const existingList = {};

while (link) {
debug(`[${i}] ${link.fingerprint}`);
log.debug("cert", `[${i}] ${link.fingerprint}`);

if (!link.valid_from || !link.valid_to) {
break;

@ -284,7 +344,7 @@ const parseCertificateInfo = function (info) {
if (link.issuerCertificate == null) {
break;
} else if (link.issuerCertificate.fingerprint in existingList) {
debug(`[Last] ${link.issuerCertificate.fingerprint}`);
log.debug("cert", `[Last] ${link.issuerCertificate.fingerprint}`);
link.issuerCertificate = null;
break;
} else {

@ -310,7 +370,7 @@ exports.checkCertificate = function (res) {
const info = res.request.res.socket.getPeerCertificate(true);
const valid = res.request.res.socket.authorized || false;

debug("Parsing Certificate Info");
log.debug("cert", "Parsing Certificate Info");
const parsedInfo = parseCertificateInfo(info);

return {

@ -322,23 +382,23 @@ exports.checkCertificate = function (res) {
/**
* Check if the provided status code is within the accepted ranges
* @param {string} status The status code to check
* @param {Array<string>} accepted_codes An array of accepted status codes
* @param {Array<string>} acceptedCodes An array of accepted status codes
* @returns {boolean} True if status code within range, false otherwise
* @throws {Error} Will throw an error if the provided status code is not a valid range string or code string
*/
exports.checkStatusCode = function (status, accepted_codes) {
if (accepted_codes == null || accepted_codes.length === 0) {
exports.checkStatusCode = function (status, acceptedCodes) {
if (acceptedCodes == null || acceptedCodes.length === 0) {
return false;
}

for (const code_range of accepted_codes) {
const code_range_split = code_range.split("-").map(string => parseInt(string));
if (code_range_split.length === 1) {
if (status === code_range_split[0]) {
for (const codeRange of acceptedCodes) {
const codeRangeSplit = codeRange.split("-").map(string => parseInt(string));
if (codeRangeSplit.length === 1) {
if (status === codeRangeSplit[0]) {
return true;
}
} else if (code_range_split.length === 2) {
if (status >= code_range_split[0] && status <= code_range_split[1]) {
} else if (codeRangeSplit.length === 2) {
if (status >= codeRangeSplit[0] && status <= codeRangeSplit[1]) {
return true;
}
} else {

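The renamed checkStatusCode helper accepts both single codes and dash-separated ranges; a tiny illustration with invented values (numeric codes, since the comparison is against parseInt results; require path assumed):

// Illustrative calls of the exported helper above.
const { checkStatusCode } = require("./util-server"); // path assumed

console.log(checkStatusCode(204, [ "200-299" ]));        // true  - inside the range
console.log(checkStatusCode(302, [ "200-299", "302" ])); // true  - exact match on the second entry
console.log(checkStatusCode(500, [ "200-299" ]));        // false - outside every accepted entry
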
@ -359,13 +419,13 @@ exports.getTotalClientInRoom = (io, roomName) => {

const sockets = io.sockets;

if (! sockets) {
if (!sockets) {
return 0;
}

const adapter = sockets.adapter;

if (! adapter) {
if (!adapter) {
return 0;
}

@ -402,16 +462,38 @@ exports.allowAllOrigin = (res) => {
* @param {Socket} socket Socket instance
*/
exports.checkLogin = (socket) => {
if (! socket.userID) {
if (!socket.userID) {
throw new Error("You are not logged in.");
}
};

/**
* For logged-in users, double-check the password
* @param {Socket} socket Socket.io instance
* @param {string} currentPassword
* @returns {Promise<Bean>}
*/
exports.doubleCheckPassword = async (socket, currentPassword) => {
if (typeof currentPassword !== "string") {
throw new Error("Wrong data type?");
}

let user = await R.findOne("user", " id = ? AND active = 1 ", [
socket.userID,
]);

if (!user || !passwordHash.verify(currentPassword, user.password)) {
throw new Error("Incorrect current password");
}

return user;
};

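A hedged sketch of how a settings-style handler might combine the new doubleCheckPassword guard with checkLogin; the event name, payload and require path are assumptions, not part of this diff:

// Hypothetical handler module; "changeSomething" is not an event from this commit.
const { checkLogin, doubleCheckPassword } = require("../util-server"); // path assumed

module.exports.exampleSocketHandler = (socket) => {
    socket.on("changeSomething", async (currentPassword, newValue, callback) => {
        try {
            checkLogin(socket);                                 // throws if the socket is not logged in
            await doubleCheckPassword(socket, currentPassword); // throws on a wrong current password

            // ... apply newValue here ...

            callback({ ok: true });
        } catch (e) {
            callback({ ok: false, msg: e.message });
        }
    });
};
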
/** Start Unit tests */
exports.startUnitTest = async () => {
console.log("Starting unit test...");
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const child = child_process.spawn(npm, ["run", "jest"]);
const child = childProcess.spawn(npm, [ "run", "jest" ]);

child.stdout.on("data", (data) => {
console.log(data.toString());

@ -434,7 +516,6 @@ exports.startUnitTest = async () => {
*/
exports.convertToUTF8 = (body) => {
const guessEncoding = chardet.detect(body);
//debug("Guess Encoding: " + guessEncoding);
const str = iconv.decode(body, guessEncoding);
return str.toString();
};

@ -1,12 +1,12 @@
<template>
<router-view />
<router-view />
</template>

<script>
import { setPageLocale } from "./util-frontend";
export default {
created() {
setPageLocale();
},
created() {
setPageLocale();
},
};
</script>

@ -22,6 +22,18 @@ textarea.form-control {
width: 10px;
}

.list-group {
border-radius: 0.75rem;

.dark & {
.list-group-item {
background-color: $dark-bg;
color: $dark-font-color;
border-color: $dark-border-color;
}
}
}

::-webkit-scrollbar-thumb {
background: #ccc;
border-radius: 20px;

@ -92,6 +104,10 @@ textarea.form-control {
}
}

.btn-dark {
background-color: #161B22;
}

@media (max-width: 550px) {
.table-shadow-box {
padding: 10px !important;

@ -144,6 +160,10 @@ textarea.form-control {
background-color: #090c10;
color: $dark-font-color;

mark, .mark {
background-color: #b6ad86;
}

&::-webkit-scrollbar-thumb, ::-webkit-scrollbar-thumb {
background: $dark-border-color;
}

@ -156,13 +176,24 @@ textarea.form-control {

.form-check-input {
background-color: $dark-bg2;
border-color: $dark-border-color;
}

.input-group-text {
background-color: #282f39;
border-color: $dark-border-color;
color: $dark-font-color;
}

.form-check-input:checked {
border-color: $primary; // Re-apply bootstrap border
}

.form-switch .form-check-input {
background-color: #232f3b;
}

a,
a:not(.btn),
.table,
.nav-link {
color: $dark-font-color;

@ -329,11 +360,8 @@ textarea.form-control {

.monitor-list {
&.scrollbar {
min-height: calc(100vh - 240px);
max-height: calc(100vh - 30px);
overflow-y: auto;
position: sticky;
top: 10px;
height: calc(100% - 65px);
}

.item {

@ -396,6 +424,10 @@ textarea.form-control {
background-color: rgba(239, 239, 239, 0.7);
border-radius: 8px;

&.no-bg {
background-color: transparent !important;
}

&:focus {
outline: 0 solid #eee;
background-color: rgba(245, 245, 245, 0.9);

@ -433,6 +465,14 @@ textarea.form-control {
border-radius: 10px !important;
}

.spinner {
color: $primary;
}

.prism-editor__textarea {
outline: none !important;
}

// Localization

@import "localization.scss";

@ -11,23 +11,23 @@
<table class="text-start">
<tbody>
<tr class="my-3">
<td class="px-3">Subject:</td>
<td class="px-3">{{ $t("Subject:") }}</td>
<td>{{ formatSubject(cert.subject) }}</td>
</tr>
<tr class="my-3">
<td class="px-3">Valid To:</td>
<td class="px-3">{{ $t("Valid To:") }}</td>
<td><Datetime :value="cert.validTo" /></td>
</tr>
<tr class="my-3">
<td class="px-3">Days Remaining:</td>
<td class="px-3">{{ $t("Days Remaining:") }}</td>
<td>{{ cert.daysRemaining }}</td>
</tr>
<tr class="my-3">
<td class="px-3">Issuer:</td>
<td class="px-3">{{ $t("Issuer:") }}</td>
<td>{{ formatSubject(cert.issuer) }}</td>
</tr>
<tr class="my-3">
<td class="px-3">Fingerprint:</td>
<td class="px-3">{{ $t("Fingerprint:") }}</td>
<td>{{ cert.fingerprint }}</td>
</tr>
</tbody>

@ -25,7 +25,7 @@
</template>

<script>
import { Modal } from "bootstrap"
import { Modal } from "bootstrap";

export default {
props: {

@ -42,19 +42,20 @@ export default {
default: "No",
},
},
emits: [ "yes" ],
data: () => ({
modal: null,
}),
mounted() {
this.modal = new Modal(this.$refs.modal)
this.modal = new Modal(this.$refs.modal);
},
methods: {
show() {
this.modal.show()
this.modal.show();
},
yes() {
this.$emit("yes");
},
},
}
};
</script>

@ -57,6 +57,7 @@ export default {
default: undefined,
},
},
emits: [ "update:modelValue" ],
data() {
return {
visibility: "password",

@ -5,12 +5,12 @@

<script lang="ts">

import { sleep } from "../util.ts"
import { sleep } from "../util.ts";

export default {

props: {
value: [String, Number],
value: [ String, Number ],
time: {
type: Number,
default: 0.3,

@ -25,12 +25,12 @@ export default {
return {
output: "",
frameDuration: 30,
}
};
},

computed: {
isNum() {
return typeof this.value === "number"
return typeof this.value === "number";
},
},

@ -45,7 +45,7 @@ export default {
} else {
for (let i = 1; i < frames; i++) {
this.output += step;
await sleep(15)
await sleep(15);
}
}

@ -59,5 +59,5 @@ export default {

methods: {},

}
};
</script>