
Merge remote-tracking branch 'base/react' into develop

Tim Burr 1 month ago
parent
commit
e0985bee43
100 changed files with 5839 additions and 5541 deletions
  1. 1 0
      .gitignore
  2. 1 1
      .version
  3. 22 22
      Jenkinsfile
  4. 1 13
      README.md
  5. 0 73
      backend/.eslintrc.json
  6. 0 11
      backend/.prettierrc
  7. 45 43
      backend/app.js
  8. 91 0
      backend/biome.json
  9. 1 1
      backend/certbot/README.md
  10. 0 0
      backend/certbot/dns-plugins.json
  11. 20 15
      backend/db.js
  12. 26 28
      backend/index.js
  13. 371 423
      backend/internal/access-list.js
  14. 72 49
      backend/internal/audit-log.js
  15. 424 479
      backend/internal/certificate.js
  16. 293 364
      backend/internal/dead-host.js
  17. 121 123
      backend/internal/host.js
  18. 65 56
      backend/internal/ip_ranges.js
  19. 103 107
      backend/internal/nginx.js
  20. 202 200
      backend/internal/proxy-host.js
  21. 195 182
      backend/internal/redirection-host.js
  22. 17 18
      backend/internal/report.js
  23. 35 43
      backend/internal/setting.js
  24. 156 153
      backend/internal/stream.js
  25. 108 116
      backend/internal/token.js
  26. 222 241
      backend/internal/user.js
  27. 211 240
      backend/lib/access.js
  28. 72 71
      backend/lib/certbot.js
  29. 151 144
      backend/lib/config.js
  30. 52 48
      backend/lib/error.js
  31. 8 7
      backend/lib/express/cors.js
  32. 12 12
      backend/lib/express/jwt-decode.js
  33. 5 5
      backend/lib/express/jwt.js
  34. 16 16
      backend/lib/express/pagination.js
  35. 2 3
      backend/lib/express/user-id-from-me.js
  36. 52 56
      backend/lib/helpers.js
  37. 25 21
      backend/lib/migrate_template.js
  38. 92 92
      backend/lib/utils.js
  39. 35 33
      backend/lib/validator/api.js
  40. 18 18
      backend/lib/validator/index.js
  41. 16 12
      backend/logger.js
  42. 11 13
      backend/migrate.js
  43. 134 133
      backend/migrations/20180618015850_initial.js
  44. 15 14
      backend/migrations/20180929054513_websockets.js
  45. 15 13
      backend/migrations/20181019052346_forward_host.js
  46. 19 18
      backend/migrations/20181113041458_http2_support.js
  47. 14 12
      backend/migrations/20181213013211_forward_scheme.js
  48. 23 21
      backend/migrations/20190104035154_disabled.js
  49. 14 12
      backend/migrations/20190215115310_customlocations.js
  50. 23 21
      backend/migrations/20190218060101_hsts.js
  51. 10 9
      backend/migrations/20190227065017_settings.js
  52. 27 28
      backend/migrations/20200410143839_access_list_client.js
  53. 14 12
      backend/migrations/20200410143840_access_list_client_fix.js
  54. 19 17
      backend/migrations/20201014143841_pass_auth.js
  55. 21 19
      backend/migrations/20210210154702_redirection_scheme.js
  56. 21 19
      backend/migrations/20210210154703_redirection_status_code.js
  57. 33 30
      backend/migrations/20210423103500_stream_domain.js
  58. 27 25
      backend/migrations/20211108145214_regenerate_default_host.js
  59. 21 16
      backend/migrations/20240427161436_stream_ssl.js
  60. 51 56
      backend/models/access_list.js
  61. 25 24
      backend/models/access_list_auth.js
  62. 25 24
      backend/models/access_list_client.js
  63. 22 22
      backend/models/audit-log.js
  64. 36 42
      backend/models/auth.js
  65. 73 64
      backend/models/certificate.js
  66. 40 47
      backend/models/dead_host.js
  67. 6 7
      backend/models/now_helper.js
  68. 56 56
      backend/models/proxy_host.js
  69. 47 48
      backend/models/redirection_host.js
  70. 3 3
      backend/models/setting.js
  71. 38 43
      backend/models/stream.js
  72. 59 58
      backend/models/token.js
  73. 28 32
      backend/models/user.js
  74. 4 4
      backend/models/user_permission.js
  75. 1 1
      backend/nodemon.json
  76. 19 18
      backend/package.json
  77. 86 31
      backend/routes/audit-log.js
  78. 44 29
      backend/routes/main.js
  79. 95 89
      backend/routes/nginx/access_lists.js
  80. 222 155
      backend/routes/nginx/certificates.js
  81. 116 106
      backend/routes/nginx/dead_hosts.js
  82. 118 106
      backend/routes/nginx/proxy_hosts.js
  83. 123 108
      backend/routes/nginx/redirection_hosts.js
  84. 118 106
      backend/routes/nginx/streams.js
  85. 18 15
      backend/routes/reports.js
  86. 25 19
      backend/routes/schema.js
  87. 56 53
      backend/routes/settings.js
  88. 30 27
      backend/routes/tokens.js
  89. 212 131
      backend/routes/users.js
  90. 7 0
      backend/schema/components/audit-log-list.json
  91. 13 1
      backend/schema/components/audit-log-object.json
  92. 0 6
      backend/schema/components/certificate-object.json
  93. 23 0
      backend/schema/components/dns-providers-list.json
  94. 5 0
      backend/schema/components/health-object.json
  95. 1 1
      backend/schema/components/stream-object.json
  96. 57 0
      backend/schema/components/user-object.json
  97. 40 35
      backend/schema/index.js
  98. 3 3
      backend/schema/paths/audit-log/get.json
  99. 73 0
      backend/schema/paths/audit-log/id/get.json
  100. 1 0
      backend/schema/paths/get.json

+ 1 - 0
.gitignore

@@ -1,5 +1,6 @@
 .DS_Store
 .idea
+.qodo
 ._*
 .vscode
 certbot-help.txt

+ 1 - 1
.version

@@ -1 +1 @@
-2.12.6
+2.13.0

+ 22 - 22
Jenkinsfile

@@ -119,13 +119,13 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/sqlite'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {
@@ -152,13 +152,13 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/mysql'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {
@@ -185,18 +185,18 @@ pipeline {
 				always {
 					// Dumps to analyze later
 					sh 'mkdir -p debug/postgres'
-					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
-					sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
+					sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
 
 					junit 'test/results/junit/*'
-					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+					sh 'docker compose down --remove-orphans --volumes -t 30 || true'
 				}
 				unstable {
 					dir(path: 'test/results') {

+ 1 - 13
README.md

@@ -1,7 +1,7 @@
 <p align="center">
 	<img src="https://nginxproxymanager.com/github.png">
 	<br><br>
-	<img src="https://img.shields.io/badge/version-2.12.6-green.svg?style=for-the-badge">
+	<img src="https://img.shields.io/badge/version-2.13.0-green.svg?style=for-the-badge">
 	<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
 		<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
 	</a>
@@ -74,11 +74,7 @@ This is the bare minimum configuration required. See the [documentation](https:/
 3. Bring up your stack by running
 
 ```bash
-docker-compose up -d
-
-# If using docker-compose-plugin
 docker compose up -d
-
 ```
 
 4. Log in to the Admin UI
@@ -88,14 +84,6 @@ Sometimes this can take a little bit because of the entropy of keys.
 
 [http://127.0.0.1:81](http://127.0.0.1:81)
 
-Default Admin User:
-```
-Email:    [email protected]
-Password: changeme
-```
-
-Immediately after logging in with this default user you will be asked to modify your details and change your password.
-
 
 ## Contributing
 

+ 0 - 73
backend/.eslintrc.json

@@ -1,73 +0,0 @@
-{
-	"env": {
-		"node": true,
-		"es6": true
-	},
-	"extends": [
-		"eslint:recommended"
-	],
-	"globals": {
-		"Atomics": "readonly",
-		"SharedArrayBuffer": "readonly"
-	},
-	"parserOptions": {
-		"ecmaVersion": 2018,
-		"sourceType": "module"
-	},
-	"plugins": [
-		"align-assignments"
-	],
-	"rules": {
-		"arrow-parens": [
-			"error",
-			"always"
-		],
-		"indent": [
-			"error",
-			"tab"
-		],
-		"linebreak-style": [
-			"error",
-			"unix"
-		],
-		"quotes": [
-			"error",
-			"single"
-		],
-		"semi": [
-			"error",
-			"always"
-		],
-		"key-spacing": [
-			"error",
-			{
-				"align": "value"
-			}
-		],
-		"comma-spacing": [
-			"error",
-			{
-				"before": false,
-				"after": true
-			}
-		],
-		"func-call-spacing": [
-			"error",
-			"never"
-		],
-		"keyword-spacing": [
-			"error",
-			{
-				"before": true
-			}
-		],
-		"no-irregular-whitespace": "error",
-		"no-unused-expressions": 0,
-		"align-assignments/align-assignments": [
-			2,
-			{
-				"requiresOnly": false
-			}
-		]
-	}
-}

+ 0 - 11
backend/.prettierrc

@@ -1,11 +0,0 @@
-{
-	"printWidth": 320,
-	"tabWidth": 4,
-	"useTabs": true,
-	"semi": true,
-	"singleQuote": true,
-	"bracketSpacing": true,
-	"jsxBracketSameLine": true,
-	"trailingComma": "all",
-	"proseWrap": "always"
-}

+ 45 - 43
backend/app.js

@@ -1,9 +1,12 @@
-const express     = require('express');
-const bodyParser  = require('body-parser');
-const fileUpload  = require('express-fileupload');
-const compression = require('compression');
-const config      = require('./lib/config');
-const log         = require('./logger').express;
+import bodyParser from "body-parser";
+import compression from "compression";
+import express from "express";
+import fileUpload from "express-fileupload";
+import { isDebugMode } from "./lib/config.js";
+import cors from "./lib/express/cors.js";
+import jwt from "./lib/express/jwt.js";
+import { express as logger } from "./logger.js";
+import mainRoutes from "./routes/main.js";
 
 /**
  * App
@@ -11,7 +14,7 @@ const log         = require('./logger').express;
 const app = express();
 app.use(fileUpload());
 app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({extended: true}));
+app.use(bodyParser.urlencoded({ extended: true }));
 
 // Gzip
 app.use(compression());
@@ -20,71 +23,70 @@ app.use(compression());
  * General Logging, BEFORE routes
  */
 
-app.disable('x-powered-by');
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
-app.enable('strict routing');
+app.disable("x-powered-by");
+app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
+app.enable("strict routing");
 
 // pretty print JSON when not live
-if (config.debug()) {
-	app.set('json spaces', 2);
+if (isDebugMode()) {
+	app.set("json spaces", 2);
 }
 
 // CORS for everything
-app.use(require('./lib/express/cors'));
+app.use(cors);
 
 // General security/cache related headers + server header
-app.use(function (req, res, next) {
-	let x_frame_options = 'DENY';
+app.use((_, res, next) => {
+	let x_frame_options = "DENY";
 
-	if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
+	if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
 		x_frame_options = process.env.X_FRAME_OPTIONS;
 	}
 
 	res.set({
-		'X-XSS-Protection':       '1; mode=block',
-		'X-Content-Type-Options': 'nosniff',
-		'X-Frame-Options':        x_frame_options,
-		'Cache-Control':          'no-cache, no-store, max-age=0, must-revalidate',
-		Pragma:                   'no-cache',
-		Expires:                  0
+		"X-XSS-Protection": "1; mode=block",
+		"X-Content-Type-Options": "nosniff",
+		"X-Frame-Options": x_frame_options,
+		"Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
+		Pragma: "no-cache",
+		Expires: 0,
 	});
 	next();
 });
 
-app.use(require('./lib/express/jwt')());
-app.use('/', require('./routes/main'));
+app.use(jwt());
+app.use("/", mainRoutes);
 
 // production error handler
 // no stacktraces leaked to user
-// eslint-disable-next-line
-app.use(function (err, req, res, next) {
-
-	let payload = {
+app.use((err, req, res, _) => {
+	const payload = {
 		error: {
 		error: {
-			code:    err.status,
-			message: err.public ? err.message : 'Internal Error'
-		}
+			code: err.status,
+			message: err.public ? err.message : "Internal Error",
+		},
 	};
 	};
 
+	if (typeof err.message_i18n !== "undefined") {
+		payload.error.message_i18n = err.message_i18n;
+	}
+
+	if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
 		payload.debug = {
 		payload.debug = {
-			stack:    typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
-			previous: err.previous
+			stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
+			previous: err.previous,
 		};
 		};
 	}
 	}
 
 
 	// Not every error is worth logging - but this is good for now until it gets annoying.
 	// Not every error is worth logging - but this is good for now until it gets annoying.
-	if (typeof err.stack !== 'undefined' && err.stack) {
-		if (config.debug()) {
-			log.debug(err.stack);
-		} else if (typeof err.public == 'undefined' || !err.public) {
-			log.warn(err.message);
+	if (typeof err.stack !== "undefined" && err.stack) {
+		logger.debug(err.stack);
+		if (typeof err.public === "undefined" || !err.public) {
+			logger.warn(err.message);
 		}
 	}
 
-	res
-		.status(err.status || 500)
-		.send(payload);
+	res.status(err.status || 500).send(payload);
 });
 
-module.exports = app;
+export default app;

+ 91 - 0
backend/biome.json

@@ -0,0 +1,91 @@
+{
+    "$schema": "https://biomejs.dev/schemas/2.3.1/schema.json",
+    "vcs": {
+        "enabled": true,
+        "clientKind": "git",
+        "useIgnoreFile": true
+    },
+    "files": {
+        "ignoreUnknown": false,
+        "includes": [
+            "**/*.ts",
+            "**/*.tsx",
+            "**/*.js",
+            "**/*.jsx",
+            "!**/dist/**/*"
+        ]
+    },
+    "formatter": {
+        "enabled": true,
+        "indentStyle": "tab",
+        "indentWidth": 4,
+        "lineWidth": 120,
+        "formatWithErrors": true
+    },
+    "assist": {
+        "actions": {
+            "source": {
+                "organizeImports": {
+                    "level": "on",
+                    "options": {
+                        "groups": [
+                            ":BUN:",
+                            ":NODE:",
+                            [
+                                "npm:*",
+                                "npm:*/**"
+                            ],
+                            ":PACKAGE_WITH_PROTOCOL:",
+                            ":URL:",
+                            ":PACKAGE:",
+                            [
+                                "/src/*",
+                                "/src/**"
+                            ],
+                            [
+                                "/**"
+                            ],
+                            [
+                                "#*",
+                                "#*/**"
+                            ],
+                            ":PATH:"
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "linter": {
+        "enabled": true,
+        "rules": {
+            "recommended": true,
+            "correctness": {
+                "useUniqueElementIds": "off"
+            },
+            "suspicious": {
+                "noExplicitAny": "off"
+            },
+            "performance": {
+                "noDelete": "off"
+            },
+            "nursery": "off",
+            "a11y": {
+                "useSemanticElements": "off",
+                "useValidAnchor": "off"
+            },
+            "style": {
+                "noParameterAssign": "error",
+                "useAsConstAssertion": "error",
+                "useDefaultParameterLast": "error",
+                "useEnumInitializers": "error",
+                "useSelfClosingElements": "error",
+                "useSingleVarDeclarator": "error",
+                "noUnusedTemplateLiteral": "error",
+                "useNumberNamespace": "error",
+                "noInferrableTypes": "error",
+                "noUselessElse": "error"
+            }
+        }
+    }
+}

+ 1 - 1
global/README.md → backend/certbot/README.md

@@ -1,4 +1,4 @@
-# certbot-dns-plugins
+# Certbot dns-plugins
 
 This file contains info about available Certbot DNS plugins.
 This only works for plugins which use the standard argument structure, so:

+ 0 - 0
global/certbot-dns-plugins.json → backend/certbot/dns-plugins.json


+ 20 - 15
backend/db.js

@@ -1,27 +1,32 @@
-const config = require('./lib/config');
+import knex from "knex";
+import {configGet, configHas} from "./lib/config.js";
 
-if (!config.has('database')) {
-	throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
-}
+const generateDbConfig = () => {
+	if (!configHas("database")) {
+		throw new Error(
+			"Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
+		);
+	}
+
+	const cfg = configGet("database");
 
-function generateDbConfig() {
-	const cfg = config.get('database');
-	if (cfg.engine === 'knex-native') {
+	if (cfg.engine === "knex-native") {
 		return cfg.knex;
 	}
+
 	return {
-		client:     cfg.engine,
+		client: cfg.engine,
 		connection: {
-			host:     cfg.host,
-			user:     cfg.user,
+			host: cfg.host,
+			user: cfg.user,
 			password: cfg.password,
 			database: cfg.name,
-			port:     cfg.port
+			port: cfg.port,
 		},
 		migrations: {
-			tableName: 'migrations'
-		}
+			tableName: "migrations",
+		},
 	};
-}
+};
 
-module.exports = require('knex')(generateDbConfig());
+export default knex(generateDbConfig());

+ 26 - 28
backend/index.js

@@ -1,48 +1,47 @@
 #!/usr/bin/env node
 
-const schema = require('./schema');
-const logger = require('./logger').global;
-
-const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== 'false';
-
-async function appStart () {
-	const migrate             = require('./migrate');
-	const setup               = require('./setup');
-	const app                 = require('./app');
-	const internalCertificate = require('./internal/certificate');
-	const internalIpRanges    = require('./internal/ip_ranges');
-
-	return migrate.latest()
+import app from "./app.js";
+import internalCertificate from "./internal/certificate.js";
+import internalIpRanges from "./internal/ip_ranges.js";
+import { global as logger } from "./logger.js";
+import { migrateUp } from "./migrate.js";
+import { getCompiledSchema } from "./schema/index.js";
+import setup from "./setup.js";
+
+const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";
+
+async function appStart() {
+	return migrateUp()
 		.then(setup)
-		.then(schema.getCompiledSchema)
+		.then(getCompiledSchema)
 		.then(() => {
-			if (IP_RANGES_FETCH_ENABLED) {
-				logger.info('IP Ranges fetch is enabled');
-				return internalIpRanges.fetch().catch((err) => {
-					logger.error('IP Ranges fetch failed, continuing anyway:', err.message);
-				});
-			} else {
-				logger.info('IP Ranges fetch is disabled by environment variable');
+			if (!IP_RANGES_FETCH_ENABLED) {
+				logger.info("IP Ranges fetch is disabled by environment variable");
+				return;
 			}
 			}
+			logger.info("IP Ranges fetch is enabled");
+			return internalIpRanges.fetch().catch((err) => {
+				logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
+			});
 		})
 		.then(() => {
 			internalCertificate.initTimer();
 			internalIpRanges.initTimer();
 
 			const server = app.listen(3000, () => {
-				logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
+				logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);
 
-				process.on('SIGTERM', () => {
-					logger.info('PID ' + process.pid + ' received SIGTERM');
+				process.on("SIGTERM", () => {
+					logger.info(`PID ${process.pid} received SIGTERM`);
 					server.close(() => {
-						logger.info('Stopping.');
+						logger.info("Stopping.");
 						process.exit(0);
 					});
 				});
 			});
 		})
 		.catch((err) => {
-			logger.error(err.message, err);
+			logger.error(`Startup Error: ${err.message}`, err);
 			setTimeout(appStart, 1000);
 		});
 }
@@ -50,7 +49,6 @@ async function appStart () {
 try {
 	appStart();
 } catch (err) {
-	logger.error(err.message, err);
+	logger.fatal(err);
 	process.exit(1);
 }
-

+ 371 - 423
backend/internal/access-list.js

@@ -1,103 +1,94 @@
-const _                     = require('lodash');
-const fs                    = require('node:fs');
-const batchflow             = require('batchflow');
-const logger                = require('../logger').access;
-const error                 = require('../lib/error');
-const utils                 = require('../lib/utils');
-const accessListModel       = require('../models/access_list');
-const accessListAuthModel   = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel        = require('../models/proxy_host');
-const internalAuditLog      = require('./audit-log');
-const internalNginx         = require('./nginx');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import fs from "node:fs";
+import batchflow from "batchflow";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { access as logger } from "../logger.js";
+import accessListModel from "../models/access_list.js";
+import accessListAuthModel from "../models/access_list_auth.js";
+import accessListClientModel from "../models/access_list_client.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 
 const internalAccessList = {
 const internalAccessList = {
-
 	/**
 	/**
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	create: (access, data) => {
-		return access.can('access_lists:create', data)
-			.then((/*access_data*/) => {
-				return accessListModel
-					.query()
-					.insertAndFetch({
-						name:          data.name,
-						satisfy_any:   data.satisfy_any,
-						pass_auth:     data.pass_auth,
-						owner_user_id: access.token.getUserId(1)
-					})
-					.then(utils.omitRow(omissions()));
+	create: async (access, data) => {
+		await access.can("access_lists:create", data);
+		const row = await accessListModel
+			.query()
+			.insertAndFetch({
+				name: data.name,
+				satisfy_any: data.satisfy_any,
+				pass_auth: data.pass_auth,
+				owner_user_id: access.token.getUserId(1),
 			})
 			})
-			.then((row) => {
-				data.id = row.id;
-
-				const promises = [];
-
-				// Now add the items
-				data.items.map((item) => {
-					promises.push(accessListAuthModel
-						.query()
-						.insert({
-							access_list_id: row.id,
-							username:       item.username,
-							password:       item.password
-						})
-					);
-				});
-
-				// Now add the clients
-				if (typeof data.clients !== 'undefined' && data.clients) {
-					data.clients.map((client) => {
-						promises.push(accessListClientModel
-							.query()
-							.insert({
-								access_list_id: row.id,
-								address:        client.address,
-								directive:      client.directive
-							})
-						);
-					});
-				}
+			.then(utils.omitRow(omissions()));
+
+		data.id = row.id;
+
+		const promises = [];
+		// Items
+		data.items.map((item) => {
+			promises.push(
+				accessListAuthModel.query().insert({
+					access_list_id: row.id,
+					username: item.username,
+					password: item.password,
+				}),
+			);
+			return true;
+		});
+
+		// Clients
+		data.clients?.map((client) => {
+			promises.push(
+				accessListClientModel.query().insert({
+					access_list_id: row.id,
+					address: client.address,
+					directive: client.directive,
+				}),
+			);
+			return true;
+		});
+
+		await Promise.all(promises);
+
+		// re-fetch with expansions
+		const freshRow = await internalAccessList.get(
+			access,
+			{
+				id: data.id,
+				expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
+			},
+			true // skip masking
+		);
+
+		// Audit log
+		data.meta = _.assign({}, data.meta || {}, freshRow.meta);
+		await internalAccessList.build(freshRow);
+
+		if (Number.parseInt(freshRow.proxy_host_count, 10)) {
+			await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
+		}
 
 
-				return Promise.all(promises);
-			})
-			.then(() => {
-				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
-				}, true /* <- skip masking */);
-			})
-			.then((row) => {
-				// Audit log
-				data.meta = _.assign({}, data.meta || {}, row.meta);
-
-				return internalAccessList.build(row)
-					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-						}
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'created',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        internalAccessList.maskItems(data)
-						});
-					})
-					.then(() => {
-						return internalAccessList.maskItems(row);
-					});
-			});
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "access-list",
+			object_id: freshRow.id,
+			meta: internalAccessList.maskItems(data),
+		});
+
+		return internalAccessList.maskItems(freshRow);
 	},
 	},
 
 
 	/**
 	/**
@@ -108,129 +99,107 @@ const internalAccessList = {
 	 * @param  {String}  [data.items]
 	 * @param  {String}  [data.items]
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
-	update: (access, data) => {
-		return access.can('access_lists:update', data.id)
-			.then((/*access_data*/) => {
-				return internalAccessList.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (row.id !== data.id) {
-					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError(`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`);
-				}
-			})
-			.then(() => {
-				// patch name if specified
-				if (typeof data.name !== 'undefined' && data.name) {
-					return accessListModel
-						.query()
-						.where({id: data.id})
-						.patch({
-							name:        data.name,
-							satisfy_any: data.satisfy_any,
-							pass_auth:   data.pass_auth,
-						});
-				}
-			})
-			.then(() => {
-				// Check for items and add/update/remove them
-				if (typeof data.items !== 'undefined' && data.items) {
-					const promises      = [];
-					const items_to_keep = [];
-
-					data.items.map((item) => {
-						if (item.password) {
-							promises.push(accessListAuthModel
-								.query()
-								.insert({
-									access_list_id: data.id,
-									username:       item.username,
-									password:       item.password
-								})
-							);
-						} else {
-							// This was supplied with an empty password, which means keep it but don't change the password
-							items_to_keep.push(item.username);
-						}
-					});
+	update: async (access, data) => {
+		await access.can("access_lists:update", data.id);
+		const row = await internalAccessList.get(access, { id: data.id });
+		if (row.id !== data.id) {
+			// Sanity check that something crazy hasn't happened
+			throw new errs.InternalValidationError(
+				`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+			);
+		}
+
+		// patch name if specified
+		if (typeof data.name !== "undefined" && data.name) {
+			await accessListModel.query().where({ id: data.id }).patch({
+				name: data.name,
+				satisfy_any: data.satisfy_any,
+				pass_auth: data.pass_auth,
+			});
+		}
 
 
-					const query = accessListAuthModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
-
-					if (items_to_keep.length) {
-						query.andWhere('username', 'NOT IN', items_to_keep);
-					}
-
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
+		// Check for items and add/update/remove them
+		if (typeof data.items !== "undefined" && data.items) {
+			const promises = [];
+			const itemsToKeep = [];
+
+			data.items.map((item) => {
+				if (item.password) {
+					promises.push(
+						accessListAuthModel.query().insert({
+							access_list_id: data.id,
+							username: item.username,
+							password: item.password,
+						}),
+					);
+				} else {
+					// This was supplied with an empty password, which means keep it but don't change the password
+					itemsToKeep.push(item.username);
 				}
 				}
-			})
-			.then(() => {
-				// Check for clients and add/update/remove them
-				if (typeof data.clients !== 'undefined' && data.clients) {
-					const promises = [];
-
-					data.clients.map((client) => {
-						if (client.address) {
-							promises.push(accessListClientModel
-								.query()
-								.insert({
-									access_list_id: data.id,
-									address:        client.address,
-									directive:      client.directive
-								})
-							);
-						}
-					});
+				return true;
+			});
+
+			const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
+
+			if (itemsToKeep.length) {
+				query.andWhere("username", "NOT IN", itemsToKeep);
+			}
+
+			await query;
+			// Add new items
+			if (promises.length) {
+				await Promise.all(promises);
+			}
+		}
 
 
-					const query = accessListClientModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
-
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
+		// Check for clients and add/update/remove them
+		if (typeof data.clients !== "undefined" && data.clients) {
+			const clientPromises = [];
+			data.clients.map((client) => {
+				if (client.address) {
+					clientPromises.push(
+						accessListClientModel.query().insert({
+							access_list_id: data.id,
+							address: client.address,
+							directive: client.directive,
+						}),
+					);
 				}
 				}
-			})
-			.then(() => {
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'access-list',
-					object_id:   data.id,
-					meta:        internalAccessList.maskItems(data)
-				});
-			})
-			.then(() => {
-				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
-				}, true /* <- skip masking */);
-			})
-			.then((row) => {
-				return internalAccessList.build(row)
-					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-						}
-					}).then(internalNginx.reload)
-					.then(() => {
-						return internalAccessList.maskItems(row);
-					});
+				return true;
 			});
 			});
+
+			const query = accessListClientModel.query().delete().where("access_list_id", data.id);
+			await query;
+			// Add new clients
+			if (clientPromises.length) {
+				await Promise.all(clientPromises);
+			}
+		}
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "updated",
+			object_type: "access-list",
+			object_id: data.id,
+			meta: internalAccessList.maskItems(data),
+		});
+
+		// re-fetch with expansions
+		const freshRow = await internalAccessList.get(
+			access,
+			{
+				id: data.id,
+				expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
+			},
+			true // skip masking
+		);
+
+		await internalAccessList.build(freshRow);
+		if (Number.parseInt(freshRow.proxy_host_count, 10)) {
+			await internalNginx.bulkGenerateConfigs("proxy_host", freshRow.proxy_hosts);
+		}
+		await internalNginx.reload();
+		return internalAccessList.maskItems(freshRow);
 	},
 	},
 
 
 	/**
 	/**
@@ -239,52 +208,50 @@ const internalAccessList = {
 	 * @param  {Integer}  data.id
 	 * @param  {Integer}  data.id
 	 * @param  {Array}    [data.expand]
 	 * @param  {Array}    [data.expand]
 	 * @param  {Array}    [data.omit]
 	 * @param  {Array}    [data.omit]
-	 * @param  {Boolean}  [skip_masking]
+	 * @param  {Boolean}  [skipMasking]
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
-	get: (access, data, skip_masking) => {
-		if (typeof data === 'undefined') {
-			data = {};
+	get: async (access, data, skipMasking) => {
+		const thisData = data || {};
+		const accessData = await access.can("access_lists:get", thisData.id);
+
+		const query = accessListModel
+			.query()
+			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+			.leftJoin("proxy_host", function () {
+				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+					"proxy_host.is_deleted",
+					"=",
+					0,
+				);
+			})
+			.where("access_list.is_deleted", 0)
+			.andWhere("access_list.id", thisData.id)
+			.groupBy("access_list.id")
+			.allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
+			.first();
+
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
 		}
 		}
 
 
-		return access.can('access_lists:get', data.id)
-			.then((access_data) => {
-				const query = accessListModel
-					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
-					})
-					.where('access_list.is_deleted', 0)
-					.andWhere('access_list.id', data.id)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
-					.first();
-
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
-				}
+		if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+			query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
+		}
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched(`[${data.expand.join(', ')}]`);
-				}
+		let row = await query.then(utils.omitRow(omissions()));
 
 
-				return query.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
-				if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
-					row = internalAccessList.maskItems(row);
-				}
-				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
-				}
-				return row;
-			});
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(thisData.id);
+		}
+		if (!skipMasking && typeof row.items !== "undefined" && row.items) {
+			row = internalAccessList.maskItems(row);
+		}
+		// Custom omissions
+		if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+			row = _.omit(row, thisData.omit);
+		}
+		return row;
 	},
 	},
 
 
 	/**
 	/**
@@ -294,73 +261,64 @@ const internalAccessList = {
 	 * @param   {String}  [data.reason]
 	 * @param   {String}  [data.reason]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	delete: (access, data) => {
-		return access.can('access_lists:delete', data.id)
-			.then(() => {
-				return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
+	delete: async (access, data) => {
+		await access.can("access_lists:delete", data.id);
+		const row = await internalAccessList.get(access, {
+			id: data.id,
+			expand: ["proxy_hosts", "items", "clients"],
+		});
+
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
 
 
-				// 1. update row to be deleted
-				// 2. update any proxy hosts that were using it (ignoring permissions)
-				// 3. reconfigure those hosts
-				// 4. audit log
-
-				// 1. update row to be deleted
-				return accessListModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						is_deleted: 1
-					})
-					.then(() => {
-						// 2. update any proxy hosts that were using it (ignoring permissions)
-						if (row.proxy_hosts) {
-							return proxyHostModel
-								.query()
-								.where('access_list_id', '=', row.id)
-								.patch({access_list_id: 0})
-								.then(() => {
-									// 3. reconfigure those hosts, then reload nginx
-
-									// set the access_list_id to zero for these items
-									row.proxy_hosts.map((_val, idx) => {
-										row.proxy_hosts[idx].access_list_id = 0;
-									});
-
-									return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
-								})
-								.then(() => {
-									return internalNginx.reload();
-								});
-						}
-					})
-					.then(() => {
-						// delete the htpasswd file
-						const htpasswd_file = internalAccessList.getFilename(row);
-
-						try {
-							fs.unlinkSync(htpasswd_file);
-						} catch (_err) {
-							// do nothing
-						}
-					})
-					.then(() => {
-						// 4. audit log
-						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
-						});
-					});
-			})
-			.then(() => {
+		// 1. update row to be deleted
+		// 2. update any proxy hosts that were using it (ignoring permissions)
+		// 3. reconfigure those hosts
+		// 4. audit log
+
+		// 1. update row to be deleted
+		await accessListModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				is_deleted: 1,
+			});
+
+		// 2. update any proxy hosts that were using it (ignoring permissions)
+		if (row.proxy_hosts) {
+			await proxyHostModel
+				.query()
+				.where("access_list_id", "=", row.id)
+				.patch({ access_list_id: 0 });
+
+			// 3. reconfigure those hosts, then reload nginx
+			// set the access_list_id to zero for these items
+			row.proxy_hosts.map((_val, idx) => {
+				row.proxy_hosts[idx].access_list_id = 0;
 				return true;
 				return true;
 			});
 			});
+
+			await internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
+		}
+
+		await internalNginx.reload();
+
+		// delete the htpasswd file
+		try {
+			fs.unlinkSync(internalAccessList.getFilename(row));
+		} catch (_err) {
+			// do nothing
+		}
+
+		// 4. audit log
+		await internalAuditLog.add(access, {
+			action: "deleted",
+			object_type: "access-list",
+			object_id: row.id,
+			meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
+		});
+		return true;
 	},
 	},
 
 
 	/**
 	/**
@@ -368,75 +326,73 @@ const internalAccessList = {
 	 *
 	 *
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('access_lists:list')
-			.then((access_data) => {
-				const query = accessListModel
-					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
-					})
-					.where('access_list.is_deleted', 0)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients]')
-					.orderBy('access_list.name', 'ASC');
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("access_lists:list");
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
-				}
+		const query = accessListModel
+			.query()
+			.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+			.leftJoin("proxy_host", function () {
+				this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+					"proxy_host.is_deleted",
+					"=",
+					0,
+				);
+			})
+			.where("access_list.is_deleted", 0)
+			.groupBy("access_list.id")
+			.allowGraph("[owner,items,clients]")
+			.orderBy("access_list.name", "ASC");
 
 
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('name', 'like', `%${search_query}%`);
-					});
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
+		}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched(`[${expand.join(', ')}]`);
-				}
+		// Query is used for searching
+		if (typeof searchQuery === "string") {
+			query.where(function () {
+				this.where("name", "like", `%${searchQuery}%`);
+			});
+		}
 
 
-				return query.then(utils.omitRows(omissions()));
-			})
-			.then((rows) => {
-				if (rows) {
-					rows.map((row, idx) => {
-						if (typeof row.items !== 'undefined' && row.items) {
-							rows[idx] = internalAccessList.maskItems(row);
-						}
-					});
-				}
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
 
 
-				return rows;
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (rows) {
+			rows.map((row, idx) => {
+				if (typeof row.items !== "undefined" && row.items) {
+					rows[idx] = internalAccessList.maskItems(row);
+				}
+				return true;
 			});
 			});
+		}
+		return rows;
 	},
 	},
 
 
 	/**
 	/**
-	 * Report use
+	 * Count is used in reports
 	 *
 	 *
-	 * @param   {Integer} user_id
+	 * @param   {Integer} userId
 	 * @param   {String}  visibility
 	 * @param   {String}  visibility
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getCount: (user_id, visibility) => {
+	getCount: async (userId, visibility) => {
 		const query = accessListModel
 		const query = accessListModel
 			.query()
 			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+			.count("id as count")
+			.where("is_deleted", 0);
 
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", userId);
 		}
 		}
 
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
+		const row = await query.first();
+		return Number.parseInt(row.count, 10);
 	},
 	},
 
 
 	/**
 	/**
@@ -444,21 +400,21 @@ const internalAccessList = {
 	 * @returns {Object}
 	 * @returns {Object}
 	 */
 	 */
 	maskItems: (list) => {
 	maskItems: (list) => {
-		if (list && typeof list.items !== 'undefined') {
+		if (list && typeof list.items !== "undefined") {
 			list.items.map((val, idx) => {
 			list.items.map((val, idx) => {
-				let repeat_for = 8;
-				let first_char = '*';
+				let repeatFor = 8;
+				let firstChar = "*";
 
 
-				if (typeof val.password !== 'undefined' && val.password) {
-					repeat_for = val.password.length - 1;
-					first_char = val.password.charAt(0);
+				if (typeof val.password !== "undefined" && val.password) {
+					repeatFor = val.password.length - 1;
+					firstChar = val.password.charAt(0);
 				}
 				}
 
 
-				list.items[idx].hint     = first_char + ('*').repeat(repeat_for);
-				list.items[idx].password = '';
+				list.items[idx].hint = firstChar + "*".repeat(repeatFor);
+				list.items[idx].password = "";
+				return true;
 			});
 			});
 		}
 		}
-
 		return list;
 		return list;
 	},
 	},
 
 
@@ -478,63 +434,55 @@ const internalAccessList = {
 	 * @param   {Array}   list.items
 	 * @param   {Array}   list.items
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	build: (list) => {
+	build: async (list) => {
 		logger.info(`Building Access file #${list.id} for: ${list.name}`);
 		logger.info(`Building Access file #${list.id} for: ${list.name}`);
 
 
-		return new Promise((resolve, reject) => {
-			const htpasswd_file = internalAccessList.getFilename(list);
+		const htpasswdFile = internalAccessList.getFilename(list);
 
 
-			// 1. remove any existing access file
-			try {
-				fs.unlinkSync(htpasswd_file);
-			} catch (_err) {
-				// do nothing
-			}
+		// 1. remove any existing access file
+		try {
+			fs.unlinkSync(htpasswdFile);
+		} catch (_err) {
+			// do nothing
+		}
 
 
-			// 2. create empty access file
-			try {
-				fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
-				resolve(htpasswd_file);
-			} catch (err) {
-				reject(err);
-			}
-		})
-			.then((htpasswd_file) => {
-				// 3. generate password for each user
-				if (list.items.length) {
-					return new Promise((resolve, reject) => {
-						batchflow(list.items).sequential()
-							.each((_i, item, next) => {
-								if (typeof item.password !== 'undefined' && item.password.length) {
-									logger.info(`Adding: ${item.username}`);
-
-									utils.execFile('openssl', ['passwd', '-apr1', item.password])
-										.then((res) => {
-											try {
-												fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {encoding: 'utf8'});
-											} catch (err) {
-												reject(err);
-											}
-											next();
-										})
-										.catch((err) => {
-											logger.error(err);
-											next(err);
-										});
-								}
-							})
-							.error((err) => {
-								logger.error(err);
-								reject(err);
-							})
-							.end((results) => {
-								logger.success(`Built Access file #${list.id} for: ${list.name}`);
-								resolve(results);
-							});
+		// 2. create empty access file
+		fs.writeFileSync(htpasswdFile, '', {encoding: 'utf8'});
+
+		// 3. generate password for each user
+		if (list.items.length) {
+			await new Promise((resolve, reject) => {
+				batchflow(list.items).sequential()
+					.each((_i, item, next) => {
+						if (item.password?.length) {
+							logger.info(`Adding: ${item.username}`);
+
+							utils.execFile('openssl', ['passwd', '-apr1', item.password])
+								.then((res) => {
+									try {
+										fs.appendFileSync(htpasswdFile, `${item.username}:${res}\n`, {encoding: 'utf8'});
+									} catch (err) {
+										reject(err);
+									}
+									next();
+								})
+								.catch((err) => {
+									logger.error(err);
+									next(err);
+								});
+						}
+					})
+					.error((err) => {
+						logger.error(err);
+						reject(err);
+					})
+					.end((results) => {
+						logger.success(`Built Access file #${list.id} for: ${list.name}`);
+						resolve(results);
 					});
 					});
-				}
 			});
 			});
+		}
 	}
 	}
-};
+}
 
 
-module.exports = internalAccessList;
+export default internalAccessList;

+ 72 - 49
backend/internal/audit-log.js

@@ -1,6 +1,6 @@
-const error            = require('../lib/error');
-const auditLogModel    = require('../models/audit-log');
-const {castJsonIfNeed} = require('../lib/helpers');
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import auditLogModel from "../models/audit-log.js";
 
 const internalAuditLog = {
 
@@ -9,32 +9,60 @@ const internalAuditLog = {
 	 *
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('auditlog:list')
-			.then(() => {
-				let query = auditLogModel
-					.query()
-					.orderBy('created_on', 'DESC')
-					.orderBy('id', 'DESC')
-					.limit(100)
-					.allowGraph('[user]');
-
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query;
+	getAll: async (access, expand, searchQuery) => {
+		await access.can("auditlog:list");
+
+		const query = auditLogModel
+			.query()
+			.orderBy("created_on", "DESC")
+			.orderBy("id", "DESC")
+			.limit(100)
+			.allowGraph("[user]");
+
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("meta"), "like", `%${searchQuery}%`);
 			});
+		}
+
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
+
+		return await query;
+	},
+
+	/**
+	 * @param  {Access}   access
+	 * @param  {Object}   [data]
+	 * @param  {Integer}  [data.id]          Defaults to the token user
+	 * @param  {Array}    [data.expand]
+	 * @return {Promise}
+	 */
+	get: async (access, data) => {
+		await access.can("auditlog:list");
+
+		const query = auditLogModel
+			.query()
+			.andWhere("id", data.id)
+			.allowGraph("[user]")
+			.first();
+
+		if (typeof data.expand !== "undefined" && data.expand !== null) {
+			query.withGraphFetched(`[${data.expand.join(", ")}]`);
+		}
+
+		const row = await query;
+
+		if (!row?.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+
+		return row;
 	},
 	},
 
 	/**
 	 * @param   {Object}   [data.meta]
 	 * @param   {Object}   [data.meta]
 	 * @returns {Promise}
 	 */
-		return new Promise((resolve, reject) => {
-			// Default the user id
-			if (typeof data.user_id === 'undefined' || !data.user_id) {
-				data.user_id = access.token.getUserId(1);
-			}
-
-			if (typeof data.action === 'undefined' || !data.action) {
-				reject(new error.InternalValidationError('Audit log entry must contain an Action'));
-			} else {
-				// Make sure at least 1 of the IDs are set and action
-				resolve(auditLogModel
-					.query()
-					.insert({
-						user_id:     data.user_id,
-						action:      data.action,
-						object_type: data.object_type || '',
-						object_id:   data.object_id || 0,
-						meta:        data.meta || {}
-					}));
-			}
+	add: async (access, data) => {
+		if (typeof data.user_id === "undefined" || !data.user_id) {
+			data.user_id = access.token.getUserId(1);
+		}
+
+		if (typeof data.action === "undefined" || !data.action) {
+			throw new errs.InternalValidationError("Audit log entry must contain an Action");
+		}
+
+		// Make sure at least 1 of the IDs are set and action
+		return await auditLogModel.query().insert({
+			user_id: data.user_id,
+			action: data.action,
+			object_type: data.object_type || "",
+			object_id: data.object_id || 0,
+			meta: data.meta || {},
 		});
 		});
-	}
+	},
 };
 };
 
 
-module.exports = internalAuditLog;
+export default internalAuditLog;

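With the promise chains flattened into async/await, the audit-log API is awaited directly. A short usage sketch, assuming an `access` object already produced by the token middleware and an existing entry id:

import internalAuditLog from "./internal/audit-log.js";

// Latest 100 entries whose meta matches the search term, with the user graph expanded.
const rows = await internalAuditLog.getAll(access, ["user"], "login");

// Single entry by id; throws errs.ItemNotFoundError when no row matches.
const entry = await internalAuditLog.get(access, { id: 123, expand: ["user"] });
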
The file diff is not shown because this file is too large
+ 424 - 479
backend/internal/certificate.js


+ 293 - 364
backend/internal/dead-host.js

@@ -1,110 +1,96 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const deadHostModel       = require('../models/dead_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import deadHostModel from "../models/dead_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 
 const internalDeadHost = {
 const internalDeadHost = {
-
 	/**
 	/**
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+	create: async (access, data) => {
+		const createCertificate = data.certificate_id === "new";
 
 
-		if (create_certificate) {
+		if (createCertificate) {
 			delete data.certificate_id;
 			delete data.certificate_id;
 		}
 		}
 
 
-		return access.can('dead_hosts:create', data)
-			.then((/*access_data*/) => {
-				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
-
-				data.domain_names.map(function (domain_name) {
-					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
-				});
-
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
-					});
-			})
-			.then(() => {
-				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
-
-				// Fix for db field not having a default value
-				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
-				}
+		await access.can("dead_hosts:create", data);
 
 
-				return deadHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
-						.then((cert) => {
-							// update host with cert id
-							return internalDeadHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
-							});
-						})
-						.then(() => {
-							return row;
-						});
-				} else {
-					return row;
+		// Get a list of the domain names and check each of them against existing records
+		const domainNameCheckPromises = [];
+
+		data.domain_names.map((domain_name) => {
+			domainNameCheckPromises.push(internalHost.isHostnameTaken(domain_name));
+			return true;
+		});
+
+		await Promise.all(domainNameCheckPromises).then((check_results) => {
+			check_results.map((result) => {
+				if (result.is_taken) {
+					throw new errs.ValidationError(`${result.hostname} is already in use`);
 				}
 				}
-			})
-			.then((row) => {
-				// re-fetch with cert
-				return internalDeadHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
-				});
-			})
-			.then((row) => {
-				// Configure nginx
-				return internalNginx.configure(deadHostModel, 'dead_host', row)
-					.then(() => {
-						return row;
-					});
-			})
-			.then((row) => {
-				data.meta = _.assign({}, data.meta || {}, row.meta);
-
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'dead-host',
-					object_id:   row.id,
-					meta:        data
-				})
-					.then(() => {
-						return row;
-					});
+				return true;
+			});
+		});
+
+		// At this point the domains should have been checked
+		data.owner_user_id = access.token.getUserId(1);
+		const thisData = internalHost.cleanSslHstsData(data);
+
+		// Fix for db field not having a default value
+		// for this optional field.
+		if (typeof data.advanced_config === "undefined") {
+			thisData.advanced_config = "";
+		}
+
+		const row = await deadHostModel.query()
+			.insertAndFetch(thisData)
+			.then(utils.omitRow(omissions()));
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: thisData,
+		});
+
+		if (createCertificate) {
+			const cert = await internalCertificate.createQuickCertificate(access, data);
+
+			// update host with cert id
+			await internalDeadHost.update(access, {
+				id: row.id,
+				certificate_id: cert.id,
 			});
 			});
+		}
+
+		// re-fetch with cert
+		const freshRow = await internalDeadHost.get(access, {
+			id: row.id,
+			expand: ["certificate", "owner"],
+		});
+
+		// Sanity check
+		if (createCertificate && !freshRow.certificate_id) {
+			throw new errs.InternalValidationError("The host was created but the Certificate creation failed.");
+		}
+
+		// Configure nginx
+		await internalNginx.configure(deadHostModel, "dead_host", freshRow);
+
+		return freshRow;
 	},
 	},
 
 
 	/**
 	/**
@@ -113,98 +99,85 @@ const internalDeadHost = {
 	 * @param  {Number}  data.id
 	 * @param  {Number}  data.id
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
-	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
-
-		if (create_certificate) {
+	update: async (access, data) => {
+		const createCertificate = data.certificate_id === "new";
+		if (createCertificate) {
 			delete data.certificate_id;
 			delete data.certificate_id;
 		}
 		}
 
 
-		return access.can('dead_hosts:update', data.id)
-			.then((/*access_data*/) => {
-				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
-
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
-					});
-
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
-						});
-				}
-			})
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (row.id !== data.id) {
-					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
-				}
+		await access.can("dead_hosts:update", data.id);
 
 
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
-						.then((cert) => {
-							// update host with cert id
-							data.certificate_id = cert.id;
-						})
-						.then(() => {
-							return row;
-						});
-				} else {
-					return row;
+		// Get a list of the domain names and check each of them against existing records
+		const domainNameCheckPromises = [];
+		if (typeof data.domain_names !== "undefined") {
+			data.domain_names.map((domainName) => {
+				domainNameCheckPromises.push(internalHost.isHostnameTaken(domainName, "dead", data.id));
+				return true;
+			});
+
+			const checkResults = await Promise.all(domainNameCheckPromises);
+			checkResults.map((result) => {
+				if (result.is_taken) {
+					throw new errs.ValidationError(`${result.hostname} is already in use`);
 				}
 				}
-			})
-			.then((row) => {
-				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
-
-				data = internalHost.cleanSslHstsData(data, row);
-
-				return deadHostModel
-					.query()
-					.where({id: data.id})
-					.patch(data)
-					.then((saved_row) => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        data
-						})
-							.then(() => {
-								return _.omit(saved_row, omissions());
-							});
-					});
-			})
-			.then(() => {
-				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
-					.then((row) => {
-						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
-					});
+				return true;
+			});
+		}
+		const row = await internalDeadHost.get(access, { id: data.id });
+
+		if (row.id !== data.id) {
+			// Sanity check that something crazy hasn't happened
+			throw new errs.InternalValidationError(
+				`404 Host could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+			);
+		}
+
+		if (createCertificate) {
+			const cert = await internalCertificate.createQuickCertificate(access, {
+				domain_names: data.domain_names || row.domain_names,
+				meta: _.assign({}, row.meta, data.meta),
+			});
+
+			// update host with cert id
+			data.certificate_id = cert.id;
+		}
+
+		// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
+		let thisData = _.assign(
+			{},
+			{
+				domain_names: row.domain_names,
+			},
+			data,
+		);
+
+		thisData = internalHost.cleanSslHstsData(thisData, row);
+
+
+		// do the row update
+		await deadHostModel
+			.query()
+			.where({id: data.id})
+			.patch(thisData);
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "updated",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: thisData,
+		});
+
+		const thisRow = await internalDeadHost
+			.get(access, {
+				id: thisData.id,
+				expand: ["owner", "certificate"],
 			});
 			});
+
+		// Configure nginx
+		const newMeta = await internalNginx.configure(deadHostModel, "dead_host", thisRow);
+		thisRow.meta = newMeta;
+		return _.omit(internalHost.cleanRowCertificateMeta(thisRow), omissions());
 	},
 	},
 
 
 	/**
 	/**
@@ -215,40 +188,32 @@ const internalDeadHost = {
 	 * @param  {Array}    [data.omit]
 	 * @param  {Array}    [data.omit]
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
-	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+	get: async (access, data) => {
+		const accessData = await access.can("dead_hosts:get", data.id);
+		const query = deadHostModel
+			.query()
+			.where("is_deleted", 0)
+			.andWhere("id", data.id)
+			.allowGraph("[owner,certificate]")
+			.first();
 
 
-		return access.can('dead_hosts:get', data.id)
-			.then((access_data) => {
-				let query = deadHostModel
-					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
-					.first();
-
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
-				}
+		if (typeof data.expand !== "undefined" && data.expand !== null) {
+			query.withGraphFetched(`[${data.expand.join(", ")}]`);
+		}
 
 
-				return query.then(utils.omitRow(omissions()));
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
-				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
-				}
-				return row;
-			});
+		const row = await query.then(utils.omitRow(omissions()));
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		// Custom omissions
+		if (typeof data.omit !== "undefined" && data.omit !== null) {
+			return _.omit(row, data.omit);
+		}
+		return row;
 	},
 	},
 
 
 	/**
 	/**
@@ -258,42 +223,32 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	delete: (access, data) => {
-		return access.can('dead_hosts:delete', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				}
+	delete: async (access, data) => {
+		await access.can("dead_hosts:delete", data.id);
+		const row = await internalDeadHost.get(access, { id: data.id });
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
 
 
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						is_deleted: 1
-					})
-					.then(() => {
-						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				is_deleted: 1,
 			});
 			});
+
+		// Delete Nginx Config
+		await internalNginx.deleteConfig("dead_host", row);
+		await internalNginx.reload();
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "deleted",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 	},
 
 
 	/**
 	/**
@@ -303,46 +258,39 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	enable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
-				});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
-				}
+	enable: async (access, data) => {
+		await access.can("dead_hosts:update", data.id);
+		const row = await internalDeadHost.get(access, {
+			id: data.id,
+			expand: ["certificate", "owner"],
+		});
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		if (row.enabled) {
+			throw new errs.ValidationError("Host is already enabled");
+		}
 
 
-				row.enabled = 1;
-
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						enabled: 1
-					})
-					.then(() => {
-						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row);
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		row.enabled = 1;
+
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				enabled: 1,
 			});
 			});
+
+		// Configure nginx
+		await internalNginx.configure(deadHostModel, "dead_host", row);
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "enabled",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 	},
 
 
 	/**
 	/**
@@ -352,46 +300,37 @@ const internalDeadHost = {
 	 * @param {String}  [data.reason]
 	 * @param {String}  [data.reason]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	disable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
-			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
-			})
-			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
-				}
+	disable: async (access, data) => {
+		await access.can("dead_hosts:update", data.id);
+		const row = await internalDeadHost.get(access, { id: data.id });
+		if (!row || !row.id) {
+			throw new errs.ItemNotFoundError(data.id);
+		}
+		if (!row.enabled) {
+			throw new errs.ValidationError("Host is already disabled");
+		}
 
 
-				row.enabled = 0;
-
-				return deadHostModel
-					.query()
-					.where('id', row.id)
-					.patch({
-						enabled: 0
-					})
-					.then(() => {
-						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
-					})
-					.then(() => {
-						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
-						});
-					});
-			})
-			.then(() => {
-				return true;
+		row.enabled = 0;
+
+		await deadHostModel
+			.query()
+			.where("id", row.id)
+			.patch({
+				enabled: 0,
 			});
 			});
+
+		// Delete Nginx Config
+		await internalNginx.deleteConfig("dead_host", row);
+		await internalNginx.reload();
+
+		// Add to audit log
+		await internalAuditLog.add(access, {
+			action: "disabled",
+			object_type: "dead-host",
+			object_id: row.id,
+			meta: _.omit(row, omissions()),
+		});
+		return true;
 	},
 	},
 
 
 	/**
 	/**
@@ -399,43 +338,38 @@ const internalDeadHost = {
 	 *
 	 *
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Array}   [expand]
 	 * @param   {Array}   [expand]
-	 * @param   {String}  [search_query]
+	 * @param   {String}  [searchQuery]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('dead_hosts:list')
-			.then((access_data) => {
-				let query = deadHostModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
-
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("dead_hosts:list");
+		const query = deadHostModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[owner,certificate]")
+			.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
-					});
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
+			});
+		}
 
 
-				return query.then(utils.omitRows(omissions()));
-			})
-			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
-					return internalHost.cleanAllRowsCertificateMeta(rows);
-				}
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
 
 
-				return rows;
-			});
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
+			internalHost.cleanAllRowsCertificateMeta(rows);
+		}
+		return rows;
 	},
 	},
 
 
 	/**
 	/**
@@ -445,21 +379,16 @@ const internalDeadHost = {
 	 * @param   {String}  visibility
 	 * @param   {String}  visibility
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getCount: (user_id, visibility) => {
-		let query = deadHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+	getCount: async (user_id, visibility) => {
+		const query = deadHostModel.query().count("id as count").where("is_deleted", 0);
 
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 		}
 
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		const row = await query.first();
+		return Number.parseInt(row.count, 10);
+	},
 };
 };
 
 
-module.exports = internalDeadHost;
+export default internalDeadHost;

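The dead-host handlers follow the same conversion: each method awaits the permission check, the model query, the nginx step and the audit-log write in sequence instead of nesting .then() callbacks. A usage sketch under the same assumptions as above (`access` supplied by middleware; only fields visible in this diff are used, the hostname is illustrative):

import internalDeadHost from "./internal/dead-host.js";

// certificate_id: "new" makes create() request a quick certificate and
// re-fetch the host before nginx is configured.
const host = await internalDeadHost.create(access, {
	domain_names: ["404.example.com"],
	certificate_id: "new",
});

// disable() removes the generated nginx config and reloads; enable() writes it back.
await internalDeadHost.disable(access, { id: host.id });
await internalDeadHost.enable(access, { id: host.id });
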
+ 121 - 123
backend/internal/host.js

@@ -1,11 +1,10 @@
-const _                    = require('lodash');
-const proxyHostModel       = require('../models/proxy_host');
-const redirectionHostModel = require('../models/redirection_host');
-const deadHostModel        = require('../models/dead_host');
-const {castJsonIfNeed}     = require('../lib/helpers');
+import _ from "lodash";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import deadHostModel from "../models/dead_host.js";
+import proxyHostModel from "../models/proxy_host.js";
+import redirectionHostModel from "../models/redirection_host.js";
 
 
 const internalHost = {
 const internalHost = {
-
 	/**
 	/**
 	 * Makes sure that the ssl_* and hsts_* fields play nicely together.
 	 * Makes sure that the ssl_* and hsts_* fields play nicely together.
 	 * ie: if there is no cert, then force_ssl is off.
 	 * ie: if there is no cert, then force_ssl is off.
@@ -15,25 +14,23 @@ const internalHost = {
 	 * @param   {object} [existing_data]
 	 * @param   {object} [existing_data]
 	 * @returns {object}
 	 * @returns {object}
 	 */
 	 */
-	cleanSslHstsData: function (data, existing_data) {
-		existing_data = existing_data === undefined ? {} : existing_data;
-
-		const combined_data = _.assign({}, existing_data, data);
+	cleanSslHstsData: (data, existingData) => {
+		const combinedData = _.assign({}, existingData || {}, data);
 
 
-		if (!combined_data.certificate_id) {
-			combined_data.ssl_forced    = false;
-			combined_data.http2_support = false;
+		if (!combinedData.certificate_id) {
+			combinedData.ssl_forced = false;
+			combinedData.http2_support = false;
 		}
 		}
 
 
-		if (!combined_data.ssl_forced) {
-			combined_data.hsts_enabled = false;
+		if (!combinedData.ssl_forced) {
+			combinedData.hsts_enabled = false;
 		}
 		}
 
 
-		if (!combined_data.hsts_enabled) {
-			combined_data.hsts_subdomains = false;
+		if (!combinedData.hsts_enabled) {
+			combinedData.hsts_subdomains = false;
 		}
 		}
 
 
-		return combined_data;
+		return combinedData;
 	},
 	},
 
 
 	/**
 	/**
@@ -42,11 +39,12 @@ const internalHost = {
 	 * @param   {Array}  rows
 	 * @param   {Array}  rows
 	 * @returns {Array}
 	 * @returns {Array}
 	 */
 	 */
-	cleanAllRowsCertificateMeta: function (rows) {
-		rows.map(function (row, idx) {
-			if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
+	cleanAllRowsCertificateMeta: (rows) => {
+		rows.map((_, idx) => {
+			if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
 				rows[idx].certificate.meta = {};
 				rows[idx].certificate.meta = {};
 			}
 			}
+			return true;
 		});
 		});
 
 
 		return rows;
 		return rows;
@@ -58,8 +56,8 @@ const internalHost = {
 	 * @param   {Object}  row
 	 * @param   {Object}  row
 	 * @returns {Object}
 	 * @returns {Object}
 	 */
 	 */
-	cleanRowCertificateMeta: function (row) {
-		if (typeof row.certificate !== 'undefined' && row.certificate) {
+	cleanRowCertificateMeta: (row) => {
+		if (typeof row.certificate !== "undefined" && row.certificate) {
 			row.certificate.meta = {};
 			row.certificate.meta = {};
 		}
 		}
 
 
@@ -67,54 +65,33 @@ const internalHost = {
 	},
 	},
 
 
 	/**
 	/**
-	 * This returns all the host types with any domain listed in the provided domain_names array.
+	 * This returns all the host types with any domain listed in the provided domainNames array.
 	 * This is used by the certificates to temporarily disable any host that is using the domain
 	 * This is used by the certificates to temporarily disable any host that is using the domain
 	 *
 	 *
-	 * @param   {Array}  domain_names
+	 * @param   {Array}  domainNames
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getHostsWithDomains: function (domain_names) {
-		const promises = [
-			proxyHostModel
-				.query()
-				.where('is_deleted', 0),
-			redirectionHostModel
-				.query()
-				.where('is_deleted', 0),
-			deadHostModel
-				.query()
-				.where('is_deleted', 0)
-		];
-
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let response_object = {
-					total_count:       0,
-					dead_hosts:        [],
-					proxy_hosts:       [],
-					redirection_hosts: []
-				};
-
-				if (promises_results[0]) {
-					// Proxy Hosts
-					response_object.proxy_hosts  = internalHost._getHostsWithDomains(promises_results[0], domain_names);
-					response_object.total_count += response_object.proxy_hosts.length;
-				}
-
-				if (promises_results[1]) {
-					// Redirection Hosts
-					response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
-					response_object.total_count      += response_object.redirection_hosts.length;
-				}
-
-				if (promises_results[2]) {
-					// Dead Hosts
-					response_object.dead_hosts   = internalHost._getHostsWithDomains(promises_results[2], domain_names);
-					response_object.total_count += response_object.dead_hosts.length;
-				}
-
-				return response_object;
-			});
+	getHostsWithDomains: async (domainNames) => {
+		const responseObject = {
+			total_count: 0,
+			dead_hosts: [],
+			proxy_hosts: [],
+			redirection_hosts: [],
+		};
+
+		const proxyRes = await proxyHostModel.query().where("is_deleted", 0);
+		responseObject.proxy_hosts = internalHost._getHostsWithDomains(proxyRes, domainNames);
+		responseObject.total_count += responseObject.proxy_hosts.length;
+
+		const redirRes = await redirectionHostModel.query().where("is_deleted", 0);
+		responseObject.redirection_hosts = internalHost._getHostsWithDomains(redirRes, domainNames);
+		responseObject.total_count += responseObject.redirection_hosts.length;
+
+		const deadRes = await deadHostModel.query().where("is_deleted", 0);
+		responseObject.dead_hosts = internalHost._getHostsWithDomains(deadRes, domainNames);
+		responseObject.total_count += responseObject.dead_hosts.length;
+
+		return responseObject;
 	},
 	},
 
 
 	/**
 	/**
@@ -125,112 +102,133 @@ const internalHost = {
 	 * @param   {Integer}  [ignore_id]     Must be supplied if type was also supplied
 	 * @param   {Integer}  [ignore_id]     Must be supplied if type was also supplied
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	isHostnameTaken: function (hostname, ignore_type, ignore_id) {
+	isHostnameTaken: (hostname, ignore_type, ignore_id) => {
 		const promises = [
 		const promises = [
 			proxyHostModel
 			proxyHostModel
 				.query()
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			redirectionHostModel
 			redirectionHostModel
 				.query()
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			deadHostModel
 			deadHostModel
 				.query()
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 		];
 		];
 
 
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let is_taken = false;
-
-				if (promises_results[0]) {
-					// Proxy Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+		return Promise.all(promises).then((promises_results) => {
+			let is_taken = false;
+
+			if (promises_results[0]) {
+				// Proxy Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[0],
+						ignore_type === "proxy" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
 				}
+			}
 
 
-				if (promises_results[1]) {
-					// Redirection Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[1]) {
+				// Redirection Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[1],
+						ignore_type === "redirection" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
 				}
+			}
 
 
-				if (promises_results[2]) {
-					// Dead Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[2]) {
+				// Dead Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[2],
+						ignore_type === "dead" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
 				}
+			}
 
 
-				return {
-					hostname: hostname,
-					is_taken: is_taken
-				};
-			});
+			return {
+				hostname: hostname,
+				is_taken: is_taken,
+			};
+		});
 	},
 	},
 
 
 	/**
 	/**
 	 * Private call only
 	 * Private call only
 	 *
 	 *
 	 * @param   {String}  hostname
 	 * @param   {String}  hostname
-	 * @param   {Array}   existing_rows
-	 * @param   {Integer} [ignore_id]
+	 * @param   {Array}   existingRows
+	 * @param   {Integer} [ignoreId]
 	 * @returns {Boolean}
 	 * @returns {Boolean}
 	 */
 	 */
-	_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
-		let is_taken = false;
+	_checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
+		let isTaken = false;
 
 
-		if (existing_rows && existing_rows.length) {
-			existing_rows.map(function (existing_row) {
-				existing_row.domain_names.map(function (existing_hostname) {
+		if (existingRows?.length) {
+			existingRows.map((existingRow) => {
+				existingRow.domain_names.map((existingHostname) => {
 					// Does this domain match?
 					// Does this domain match?
-					if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
-						if (!ignore_id || ignore_id !== existing_row.id) {
-							is_taken = true;
+					if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
+						if (!ignoreId || ignoreId !== existingRow.id) {
+							isTaken = true;
 						}
 						}
 					}
 					}
+					return true;
 				});
 				});
+				return true;
 			});
 			});
 		}
 		}
 
 
-		return is_taken;
+		return isTaken;
 	},
 	},
 
 
 	/**
 	/**
 	 * Private call only
 	 * Private call only
 	 *
 	 *
 	 * @param   {Array}   hosts
 	 * @param   {Array}   hosts
-	 * @param   {Array}   domain_names
+	 * @param   {Array}   domainNames
 	 * @returns {Array}
 	 * @returns {Array}
 	 */
 	 */
-	_getHostsWithDomains: function (hosts, domain_names) {
-		let response = [];
+	_getHostsWithDomains: (hosts, domainNames) => {
+		const response = [];
 
 
-		if (hosts && hosts.length) {
-			hosts.map(function (host) {
-				let host_matches = false;
+		if (hosts?.length) {
+			hosts.map((host) => {
+				let hostMatches = false;
 
 
-				domain_names.map(function (domain_name) {
-					host.domain_names.map(function (host_domain_name) {
-						if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
-							host_matches = true;
+				domainNames.map((domainName) => {
+					host.domain_names.map((hostDomainName) => {
+						if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
+							hostMatches = true;
 						}
 						}
+						return true;
 					});
 					});
+					return true;
 				});
 				});
 
 
-				if (host_matches) {
+				if (hostMatches) {
 					response.push(host);
 					response.push(host);
 				}
 				}
+				return true;
 			});
 			});
 		}
 		}
 
 
 		return response;
 		return response;
-	}
-
+	},
 };
 };
 
 
-module.exports = internalHost;
+export default internalHost;

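internalHost keeps its helpers mostly intact, but getHostsWithDomains() now runs the three host queries one after another with await. A small sketch of the two public checks, with illustrative hostnames and record id:

import internalHost from "./internal/host.js";

// Is the hostname already used by any proxy/redirection/404 host?
// The ignore arguments let an update skip its own record ("dead" type, id 42 here).
const { hostname, is_taken } = await internalHost.isHostnameTaken("example.com", "dead", 42);

// All hosts referencing any of the given domains, grouped by type with a total count.
const { total_count, proxy_hosts, redirection_hosts, dead_hosts } =
	await internalHost.getHostsWithDomains(["example.com", "www.example.com"]);
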
+ 65 - 56
backend/internal/ip_ranges.js

@@ -1,45 +1,51 @@
-const https         = require('https');
-const fs            = require('fs');
-const logger        = require('../logger').ip_ranges;
-const error         = require('../lib/error');
-const utils         = require('../lib/utils');
-const internalNginx = require('./nginx');
-
-const CLOUDFRONT_URL   = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
-const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
-const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
+import fs from "node:fs";
+import https from "node:https";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { ipRanges as logger } from "../logger.js";
+import internalNginx from "./nginx.js";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
+const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
+const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";
 
 
 const regIpV4 = /^(\d+\.?){4}\/\d+/;
 const regIpV4 = /^(\d+\.?){4}\/\d+/;
 const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
 const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
 
 
 const internalIpRanges = {
 const internalIpRanges = {
-
-	interval_timeout:    1000 * 60 * 60 * 6, // 6 hours
-	interval:            null,
+	interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
+	interval: null,
 	interval_processing: false,
 	interval_processing: false,
-	iteration_count:     0,
+	iteration_count: 0,
 
 
 	initTimer: () => {
 	initTimer: () => {
-		logger.info('IP Ranges Renewal Timer initialized');
+		logger.info("IP Ranges Renewal Timer initialized");
 		internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
 		internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
 	},
 	},
 
 
 	fetchUrl: (url) => {
 	fetchUrl: (url) => {
 		return new Promise((resolve, reject) => {
 		return new Promise((resolve, reject) => {
-			logger.info('Fetching ' + url);
-			return https.get(url, (res) => {
-				res.setEncoding('utf8');
-				let raw_data = '';
-				res.on('data', (chunk) => {
-					raw_data += chunk;
-				});
+			logger.info(`Fetching ${url}`);
+			return https
+				.get(url, (res) => {
+					res.setEncoding("utf8");
+					let raw_data = "";
+					res.on("data", (chunk) => {
+						raw_data += chunk;
+					});
 
 
-				res.on('end', () => {
-					resolve(raw_data);
+					res.on("end", () => {
+						resolve(raw_data);
+					});
+				})
+				.on("error", (err) => {
+					reject(err);
 				});
 				});
-			}).on('error', (err) => {
-				reject(err);
-			});
 		});
 		});
 	},
 	},
 
 
@@ -49,27 +55,30 @@ const internalIpRanges = {
 	fetch: () => {
 	fetch: () => {
 		if (!internalIpRanges.interval_processing) {
 		if (!internalIpRanges.interval_processing) {
 			internalIpRanges.interval_processing = true;
 			internalIpRanges.interval_processing = true;
-			logger.info('Fetching IP Ranges from online services...');
+			logger.info("Fetching IP Ranges from online services...");
 
 
 			let ip_ranges = [];
 			let ip_ranges = [];
 
 
-			return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
+			return internalIpRanges
+				.fetchUrl(CLOUDFRONT_URL)
 				.then((cloudfront_data) => {
 				.then((cloudfront_data) => {
-					let data = JSON.parse(cloudfront_data);
+					const data = JSON.parse(cloudfront_data);
 
 
-					if (data && typeof data.prefixes !== 'undefined') {
+					if (data && typeof data.prefixes !== "undefined") {
 						data.prefixes.map((item) => {
 						data.prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ip_prefix);
 								ip_ranges.push(item.ip_prefix);
 							}
 							}
+							return true;
 						});
 						});
 					}
 					}
 
 
-					if (data && typeof data.ipv6_prefixes !== 'undefined') {
+					if (data && typeof data.ipv6_prefixes !== "undefined") {
 						data.ipv6_prefixes.map((item) => {
 						data.ipv6_prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ipv6_prefix);
 								ip_ranges.push(item.ipv6_prefix);
 							}
 							}
+							return true;
 						});
 						});
 					}
 					}
 				})
 				})
@@ -77,38 +86,38 @@ const internalIpRanges = {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
 					return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
 				})
 				})
 				.then((cloudfare_data) => {
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				})
 				.then(() => {
 				.then(() => {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
 					return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
 				})
 				})
 				.then((cloudfare_data) => {
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				})
 				.then(() => {
 				.then(() => {
-					let clean_ip_ranges = [];
+					const clean_ip_ranges = [];
 					ip_ranges.map((range) => {
 					ip_ranges.map((range) => {
 						if (range) {
 						if (range) {
 							clean_ip_ranges.push(range);
 							clean_ip_ranges.push(range);
 						}
 						}
+						return true;
 					});
 					});
 
 
-					return internalIpRanges.generateConfig(clean_ip_ranges)
-						.then(() => {
-							if (internalIpRanges.iteration_count) {
-								// Reload nginx
-								return internalNginx.reload();
-							}
-						});
+					return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
+						if (internalIpRanges.iteration_count) {
+							// Reload nginx
+							return internalNginx.reload();
+						}
+					});
 				})
 				})
 				.then(() => {
 				.then(() => {
 					internalIpRanges.interval_processing = false;
 					internalIpRanges.interval_processing = false;
 					internalIpRanges.iteration_count++;
 					internalIpRanges.iteration_count++;
 				})
 				})
 				.catch((err) => {
 				.catch((err) => {
-					logger.error(err.message);
+					logger.fatal(err.message);
 					internalIpRanges.interval_processing = false;
 					internalIpRanges.interval_processing = false;
 				});
 				});
 		}
 		}
@@ -122,26 +131,26 @@ const internalIpRanges = {
 		const renderEngine = utils.getRenderEngine();
 		const renderEngine = utils.getRenderEngine();
 		return new Promise((resolve, reject) => {
 		return new Promise((resolve, reject) => {
 			let template = null;
 			let template = null;
-			let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
+			const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
 			try {
 			try {
-				template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
 			} catch (err) {
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 				return;
 			}
 			}
 
 
 			renderEngine
 			renderEngine
-				.parseAndRender(template, {ip_ranges: ip_ranges})
+				.parseAndRender(template, { ip_ranges: ip_ranges })
 				.then((config_text) => {
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
 					resolve(true);
 					resolve(true);
 				})
 				})
 				.catch((err) => {
 				.catch((err) => {
-					logger.warn('Could not write ' + filename + ':', err.message);
-					reject(new error.ConfigurationError(err.message));
+					logger.warn(`Could not write ${filename}: ${err.message}`);
+					reject(new errs.ConfigurationError(err.message));
 				});
 				});
 		});
 		});
-	}
+	},
 };
 };
 
 
-module.exports = internalIpRanges;
+export default internalIpRanges;

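Because ES modules do not define __dirname/__filename, this file (and nginx.js below) rebuilds them from import.meta.url before reading templates relative to the module. The shim, isolated as a sketch; newer Node releases also expose import.meta.dirname, which would make it unnecessary:

import { dirname } from "node:path";
import { fileURLToPath } from "node:url";

// Recreate the CommonJS path globals from the module URL.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Template paths can then stay relative to this file, as in the diff above.
const templatePath = `${__dirname}/../templates/ip_ranges.conf`;
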
+ 103 - 107
backend/internal/nginx.js

@@ -1,12 +1,15 @@
-const _      = require('lodash');
-const fs     = require('node:fs');
-const logger = require('../logger').nginx;
-const config = require('../lib/config');
-const utils  = require('../lib/utils');
-const error  = require('../lib/error');
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { nginx as logger } from "../logger.js";
 
 
-const internalNginx = {
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
 
+const internalNginx = {
 	/**
 	/**
 	 * This will:
 	 * This will:
 	 * - test the nginx config first to make sure it's OK
 	 * - test the nginx config first to make sure it's OK
@@ -24,7 +27,8 @@ const internalNginx = {
 	configure: (model, host_type, host) => {
 	configure: (model, host_type, host) => {
 		let combined_meta = {};
 		let combined_meta = {};
 
 
-		return internalNginx.test()
+		return internalNginx
+			.test()
 			.then(() => {
 			.then(() => {
 				// Nginx is OK
 				// Nginx is OK
 				// We're deleting this config regardless.
 				// We're deleting this config regardless.
@@ -37,20 +41,18 @@ const internalNginx = {
 			})
 			})
 			.then(() => {
 			.then(() => {
 				// Test nginx again and update meta with result
 				// Test nginx again and update meta with result
-				return internalNginx.test()
+				return internalNginx
+					.test()
 					.then(() => {
 					.then(() => {
 						// nginx is ok
 						// nginx is ok
 						combined_meta = _.assign({}, host.meta, {
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: true,
 							nginx_online: true,
-							nginx_err:    null
+							nginx_err: null,
 						});
 						});
 
 
-						return model
-							.query()
-							.where('id', host.id)
-							.patch({
-								meta: combined_meta
-							});
+						return model.query().where("id", host.id).patch({
+							meta: combined_meta,
+						});
 					})
 					})
 					.catch((err) => {
 					.catch((err) => {
 						// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
 						// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
@@ -58,28 +60,27 @@ const internalNginx = {
 						//   nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
 						//   nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
 
 
 						const valid_lines = [];
 						const valid_lines = [];
-						const err_lines   = err.message.split('\n');
+						const err_lines = err.message.split("\n");
 						err_lines.map((line) => {
 						err_lines.map((line) => {
-							if (line.indexOf('/var/log/nginx/error.log') === -1) {
+							if (line.indexOf("/var/log/nginx/error.log") === -1) {
 								valid_lines.push(line);
 								valid_lines.push(line);
 							}
 							}
+							return true;
 						});
 						});
 
 
-						if (config.debug()) {
-							logger.error('Nginx test failed:', valid_lines.join('\n'));
-						}
+						logger.debug("Nginx test failed:", valid_lines.join("\n"));
 
 
 						// config is bad, update meta and delete config
 						// config is bad, update meta and delete config
 						combined_meta = _.assign({}, host.meta, {
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: false,
 							nginx_online: false,
-							nginx_err:    valid_lines.join('\n')
+							nginx_err: valid_lines.join("\n"),
 						});
 						});
 
 
 						return model
 						return model
 							.query()
 							.query()
-							.where('id', host.id)
+							.where("id", host.id)
 							.patch({
 							.patch({
-								meta: combined_meta
+								meta: combined_meta,
 							})
 							})
 							.then(() => {
 							.then(() => {
 								internalNginx.renameConfigAsError(host_type, host);
 								internalNginx.renameConfigAsError(host_type, host);
@@ -101,22 +102,18 @@ const internalNginx = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	test: () => {
 	test: () => {
-		if (config.debug()) {
-			logger.info('Testing Nginx configuration');
-		}
-
-		return utils.execFile('/usr/sbin/nginx', ['-t', '-g', 'error_log off;']);
+		logger.debug("Testing Nginx configuration");
+		return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
 	},
 	},
 
 
 	/**
 	/**
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	reload: () => {
 	reload: () => {
-		return internalNginx.test()
-			.then(() => {
-				logger.info('Reloading Nginx');
-				return utils.execFile('/usr/sbin/nginx', ['-s', 'reload']);
-			});
+		return internalNginx.test().then(() => {
+			logger.info("Reloading Nginx");
+			return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
+		});
 	},
 	},
 
 
 	/**
 	/**
@@ -125,8 +122,8 @@ const internalNginx = {
 	 * @returns {String}
 	 * @returns {String}
 	 */
 	 */
 	getConfigName: (host_type, host_id) => {
 	getConfigName: (host_type, host_id) => {
-		if (host_type === 'default') {
-			return '/data/nginx/default_host/site.conf';
+		if (host_type === "default") {
+			return "/data/nginx/default_host/site.conf";
 		}
 		}
 		return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
 		return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
 	},
 	},
@@ -141,38 +138,45 @@ const internalNginx = {
 			let template;
 			let template;
 
 
 			try {
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
 			} catch (err) {
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 				return;
 			}
 			}
 
 
-			const renderEngine    = utils.getRenderEngine();
-			let renderedLocations = '';
+			const renderEngine = utils.getRenderEngine();
+			let renderedLocations = "";
 
 
 			const locationRendering = async () => {
 			const locationRendering = async () => {
 				for (let i = 0; i < host.locations.length; i++) {
 				for (let i = 0; i < host.locations.length; i++) {
-					const locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
-						{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
-						{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
-						{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
-						{certificate: host.certificate}, host.locations[i]);
-
-					if (locationCopy.forward_host.indexOf('/') > -1) {
-						const splitted = locationCopy.forward_host.split('/');
+					const locationCopy = Object.assign(
+						{},
+						{ access_list_id: host.access_list_id },
+						{ certificate_id: host.certificate_id },
+						{ ssl_forced: host.ssl_forced },
+						{ caching_enabled: host.caching_enabled },
+						{ block_exploits: host.block_exploits },
+						{ allow_websocket_upgrade: host.allow_websocket_upgrade },
+						{ http2_support: host.http2_support },
+						{ hsts_enabled: host.hsts_enabled },
+						{ hsts_subdomains: host.hsts_subdomains },
+						{ access_list: host.access_list },
+						{ certificate: host.certificate },
+						host.locations[i],
+					);
+
+					if (locationCopy.forward_host.indexOf("/") > -1) {
+						const splitted = locationCopy.forward_host.split("/");
 
 
 						locationCopy.forward_host = splitted.shift();
 						locationCopy.forward_host = splitted.shift();
-						locationCopy.forward_path = `/${splitted.join('/')}`;
+						locationCopy.forward_path = `/${splitted.join("/")}`;
 					}
 					}
 
 
-					// eslint-disable-next-line
 					renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
 					renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
 				}
 				}
-
 			};
 			};
 
 
 			locationRendering().then(() => resolve(renderedLocations));
 			locationRendering().then(() => resolve(renderedLocations));
-
 		});
 		});
 	},
 	},
 
 
@@ -183,23 +187,21 @@ const internalNginx = {
 	 */
 	 */
 	generateConfig: (host_type, host_row) => {
 	generateConfig: (host_type, host_row) => {
 		// Prevent modifying the original object:
 		// Prevent modifying the original object:
-		const host           = JSON.parse(JSON.stringify(host_row));
+		const host = JSON.parse(JSON.stringify(host_row));
 		const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
 		const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
 
 
-		if (config.debug()) {
-			logger.info(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
-		}
+		logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
 
 
 		const renderEngine = utils.getRenderEngine();
 		const renderEngine = utils.getRenderEngine();
 
 
 		return new Promise((resolve, reject) => {
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = internalNginx.getConfigName(nice_host_type, host.id);
 			const filename = internalNginx.getConfigName(nice_host_type, host.id);
 
 
 			try {
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
 			} catch (err) {
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 				return;
 			}
 			}
 
 
@@ -207,27 +209,26 @@ const internalNginx = {
 			let origLocations;
 			let origLocations;
 
 
 			// Manipulate the data a bit before sending it to the template
 			// Manipulate the data a bit before sending it to the template
-			if (nice_host_type !== 'default') {
+			if (nice_host_type !== "default") {
 				host.use_default_location = true;
 				host.use_default_location = true;
-				if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
+				if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
 					host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
 					host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
 				}
 				}
 			}
 			}
 
 
 			if (host.locations) {
 			if (host.locations) {
 				//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
 				//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
-				origLocations    = [].concat(host.locations);
+				origLocations = [].concat(host.locations);
 				locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
 				locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
 					host.locations = renderedLocations;
 					host.locations = renderedLocations;
 				});
 				});
 
 
 				// Allow someone who is using / custom location path to use it, and skip the default / location
 				// Allow someone who is using / custom location path to use it, and skip the default / location
 				_.map(host.locations, (location) => {
 				_.map(host.locations, (location) => {
-					if (location.path === '/') {
+					if (location.path === "/") {
 						host.use_default_location = false;
 						host.use_default_location = false;
 					}
 					}
 				});
 				});
-
 			} else {
 			} else {
 				locationsPromise = Promise.resolve();
 				locationsPromise = Promise.resolve();
 			}
 			}
@@ -239,11 +240,8 @@ const internalNginx = {
 				renderEngine
 				renderEngine
 					.parseAndRender(template, host)
 					.parseAndRender(template, host)
 					.then((config_text) => {
 					.then((config_text) => {
-						fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-						if (config.debug()) {
-							logger.success('Wrote config:', filename, config_text);
-						}
+						fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+						logger.debug("Wrote config:", filename, config_text);
 
 
 						// Restore locations array
 						// Restore locations array
 						host.locations = origLocations;
 						host.locations = origLocations;
@@ -251,11 +249,8 @@ const internalNginx = {
 						resolve(true);
 						resolve(true);
 					})
 					})
 					.catch((err) => {
 					.catch((err) => {
-						if (config.debug()) {
-							logger.warn(`Could not write ${filename}:`, err.message);
-						}
-
-						reject(new error.ConfigurationError(err.message));
+						logger.debug(`Could not write ${filename}:`, err.message);
+						reject(new errs.ConfigurationError(err.message));
 					});
 					});
 			});
 			});
 		});
 		});
@@ -270,20 +265,17 @@ const internalNginx = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	generateLetsEncryptRequestConfig: (certificate) => {
 	generateLetsEncryptRequestConfig: (certificate) => {
-		if (config.debug()) {
-			logger.info('Generating LetsEncrypt Request Config:', certificate);
-		}
-
+		logger.debug("Generating LetsEncrypt Request Config:", certificate);
 		const renderEngine = utils.getRenderEngine();
 		const renderEngine = utils.getRenderEngine();
 
 
 		return new Promise((resolve, reject) => {
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
 			const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
 
 
 			try {
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
 			} catch (err) {
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 				return;
 			}
 			}
 
 
@@ -292,20 +284,13 @@ const internalNginx = {
 			renderEngine
 			renderEngine
 				.parseAndRender(template, certificate)
 				.parseAndRender(template, certificate)
 				.then((config_text) => {
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-					if (config.debug()) {
-						logger.success('Wrote config:', filename, config_text);
-					}
-
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+					logger.debug("Wrote config:", filename, config_text);
 					resolve(true);
 					resolve(true);
 				})
 				})
 				.catch((err) => {
 				.catch((err) => {
-					if (config.debug()) {
-						logger.warn(`Could not write ${filename}:`, err.message);
-					}
-
-					reject(new error.ConfigurationError(err.message));
+					logger.debug(`Could not write ${filename}:`, err.message);
+					reject(new errs.ConfigurationError(err.message));
 				});
 				});
 		});
 		});
 	},
 	},
@@ -316,11 +301,14 @@ const internalNginx = {
 	 * @param   {String}  filename
 	 * @param   {String}  filename
 	 */
 	 */
 	deleteFile: (filename) => {
 	deleteFile: (filename) => {
-		logger.debug(`Deleting file: ${filename}`);
+		if (!fs.existsSync(filename)) {
+			return;
+		}
 		try {
 		try {
+			logger.debug(`Deleting file: ${filename}`);
 			fs.unlinkSync(filename);
 			fs.unlinkSync(filename);
 		} catch (err) {
 		} catch (err) {
-			logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
+			logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
 		}
 		}
 	},
 	},
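Note: the rewritten `deleteFile` now returns early when the target does not exist, so cleaning up a config that was never written is a silent no-op rather than a logged unlink error. A self-contained sketch of the same guard, outside the `internalNginx` object:

```js
import fs from "node:fs";

// Idempotent delete: safe to call whether or not the file exists.
const deleteFile = (filename) => {
	if (!fs.existsSync(filename)) {
		return;
	}
	try {
		fs.unlinkSync(filename);
	} catch (err) {
		console.debug("Could not delete file:", err.message);
	}
};

deleteFile("/tmp/npm-example-does-not-exist.conf"); // no error
deleteFile("/tmp/npm-example-does-not-exist.conf"); // calling twice is equally harmless
```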
 
 
@@ -330,7 +318,7 @@ const internalNginx = {
 	 * @returns String
 	 * @returns String
 	 */
 	 */
 	getFileFriendlyHostType: (host_type) => {
 	getFileFriendlyHostType: (host_type) => {
-		return host_type.replace(/-/g, '_');
+		return host_type.replace(/-/g, "_");
 	},
 	},
 
 
 	/**
 	/**
@@ -341,7 +329,7 @@ const internalNginx = {
 	 */
 	 */
 	deleteLetsEncryptRequestConfig: (certificate) => {
 	deleteLetsEncryptRequestConfig: (certificate) => {
 		const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
 		const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			internalNginx.deleteFile(config_file);
 			resolve();
 			resolve();
 		});
 		});
@@ -354,10 +342,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	deleteConfig: (host_type, host, delete_err_file) => {
 	deleteConfig: (host_type, host, delete_err_file) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 		const config_file_err = `${config_file}.err`;
 
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			internalNginx.deleteFile(config_file);
 			if (delete_err_file) {
 			if (delete_err_file) {
 				internalNginx.deleteFile(config_file_err);
 				internalNginx.deleteFile(config_file_err);
@@ -372,10 +363,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	renameConfigAsError: (host_type, host) => {
 	renameConfigAsError: (host_type, host) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 		const config_file_err = `${config_file}.err`;
 
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			fs.unlink(config_file, () => {
 			fs.unlink(config_file, () => {
 				// ignore result, continue
 				// ignore result, continue
 				fs.rename(config_file, config_file_err, () => {
 				fs.rename(config_file, config_file_err, () => {
@@ -387,14 +381,15 @@ const internalNginx = {
 	},
 	},
 
 
 	/**
 	/**
-	 * @param   {String}  host_type
+	 * @param   {String}  hostType
 	 * @param   {Array}   hosts
 	 * @param   {Array}   hosts
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	bulkGenerateConfigs: (host_type, hosts) => {
+	bulkGenerateConfigs: (hostType, hosts) => {
 		const promises = [];
 		const promises = [];
 		hosts.map((host) => {
 		hosts.map((host) => {
-			promises.push(internalNginx.generateConfig(host_type, host));
+			promises.push(internalNginx.generateConfig(hostType, host));
+			return true;
 		});
 		});
 
 
 		return Promise.all(promises);
 		return Promise.all(promises);
@@ -409,6 +404,7 @@ const internalNginx = {
 		const promises = [];
 		const promises = [];
 		hosts.map((host) => {
 		hosts.map((host) => {
 			promises.push(internalNginx.deleteConfig(host_type, host, true));
 			promises.push(internalNginx.deleteConfig(host_type, host, true));
+			return true;
 		});
 		});
 
 
 		return Promise.all(promises);
 		return Promise.all(promises);
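Note: both bulk helpers keep the `hosts.map(...)` loop that pushes into a `promises` array and now `return true` from the callback, presumably to satisfy a lint rule about array callbacks returning a value (Biome replaces ESLint elsewhere in this branch). The same behaviour can also be expressed through `map`'s return value directly; a hedged sketch, with a stand-in `generateConfig` in place of `internalNginx.generateConfig`:

```js
// Sketch only: same behaviour as bulkGenerateConfigs, using map's return value.
const generateConfig = (hostType, host) => Promise.resolve(`${hostType}_${host.id}.conf`);

const bulkGenerateConfigs = (hostType, hosts) =>
	Promise.all(hosts.map((host) => generateConfig(hostType, host)));

bulkGenerateConfigs("proxy_host", [{ id: 1 }, { id: 2 }]).then((files) => {
	console.log(files); // ["proxy_host_1.conf", "proxy_host_2.conf"]
});
```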
@@ -424,13 +420,13 @@ const internalNginx = {
 	 * @returns {boolean}
 	 * @returns {boolean}
 	 */
 	 */
 	ipv6Enabled: () => {
 	ipv6Enabled: () => {
-		if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
+		if (typeof process.env.DISABLE_IPV6 !== "undefined") {
 			const disabled = process.env.DISABLE_IPV6.toLowerCase();
 			const disabled = process.env.DISABLE_IPV6.toLowerCase();
-			return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
+			return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
 		}
 		}
 
 
 		return true;
 		return true;
-	}
+	},
 };
 };
 
 
-module.exports = internalNginx;
+export default internalNginx;
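Note: as with every file in this commit, the module ends by switching from CommonJS to ES modules: `require()` becomes `import` (with explicit `.js` extensions on local paths) and `module.exports` becomes `export default`. A minimal before/after sketch of the pattern, using `node:path` so it runs standalone:

```js
// Before (CommonJS):
//   const path = require("node:path");
//   module.exports = { configDir };

// After (ES modules): needs "type": "module" in package.json or a .mjs extension.
import path from "node:path";

const configDir = path.join("/data", "nginx");

export default { configDir };
```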

+ 202 - 200
backend/internal/proxy-host.js

@@ -1,107 +1,106 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const proxyHostModel      = require('../models/proxy_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted"];
+};
 
 
 const internalProxyHost = {
 const internalProxyHost = {
-
 	/**
 	/**
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	create: (access, data) => {
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const createCertificate = thisData.certificate_id === "new";
 
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 		}
 
 
-		return access.can('proxy_hosts:create', data)
+		return access
+			.can("proxy_hosts:create", thisData)
 			.then(() => {
 			.then(() => {
 				// Get a list of the domain names and check each of them against existing records
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 				});
 
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
 					});
+				});
 			})
 			})
 			.then(() => {
 			.then(() => {
 				// At this point the domains should have been checked
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 
 				// Fix for db field not having a default value
 				// Fix for db field not having a default value
 				// for this optional field.
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof thisData.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 				}
 
 
-				return proxyHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
 							return internalProxyHost.update(access, {
 							return internalProxyHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 							});
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return row;
 							return row;
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// re-fetch with cert
 				// re-fetch with cert
 				return internalProxyHost.get(access, {
 				return internalProxyHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner', 'access_list.[clients,items]']
+					id: row.id,
+					expand: ["certificate", "owner", "access_list.[clients,items]"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Configure nginx
 				// Configure nginx
-				return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
+					return row;
+				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Audit log
 				// Audit log
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 
 				// Add to audit log
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'proxy-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "proxy-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 					.then(() => {
 						return row;
 						return row;
 					});
 					});
@@ -115,100 +114,110 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	update: (access, data) => {
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 
 		if (create_certificate) {
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 		}
 
 
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						return domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
+						);
 					});
 					});
 
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
 						});
+					});
 				}
 				}
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: thisData.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 				}
 
 
 				if (create_certificate) {
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return row;
 							return row;
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					data,
+				);
 
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 
 				return proxyHostModel
 				return proxyHostModel
 					.query()
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then(utils.omitRow(omissions()))
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 					.then((saved_row) => {
 						// Add to audit log
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "proxy-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 							.then(() => {
 								return saved_row;
 								return saved_row;
 							});
 							});
 					});
 					});
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate', 'access_list.[clients,items]']
-				})
+				return internalProxyHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate", "access_list.[clients,items]"],
+					})
 					.then((row) => {
 					.then((row) => {
 						if (!row.enabled) {
 						if (!row.enabled) {
 							// No need to add nginx config if host is disabled
 							// No need to add nginx config if host is disabled
 							return row;
 							return row;
 						}
 						}
 						// Configure nginx
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+						return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
+							row.meta = new_meta;
+							return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
+						});
 					});
 					});
 			});
 			});
 	},
 	},
@@ -222,39 +231,38 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	get: (access, data) => {
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
 
-		return access.can('proxy_hosts:get', data.id)
+		return access
+			.can("proxy_hosts:get", thisData.id)
 			.then((access_data) => {
 			.then((access_data) => {
-				let query = proxyHostModel
+				const query = proxyHostModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,access_list.[clients,items],certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,access_list.[clients,items],certificate]")
 					.first();
 					.first();
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 				}
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRow(omissions()));
 				return query.then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				const thisRow = internalHost.cleanRowCertificateMeta(row);
 				// Custom omissions
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
 				}
-				return row;
+				return thisRow;
 			});
 			});
 	},
 	},
 
 
@@ -266,35 +274,35 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	delete: (access, data) => {
 	delete: (access, data) => {
-		return access.can('proxy_hosts:delete', data.id)
+		return access
+			.can("proxy_hosts:delete", data.id)
 			.then(() => {
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 				}
 
 
 				return proxyHostModel
 				return proxyHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -311,39 +319,41 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	enable: (access, data) => {
 	enable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
 			.then(() => {
 				return internalProxyHost.get(access, {
 				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner', 'access_list']
+					id: data.id,
+					expand: ["certificate", "owner", "access_list"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 				}
 
 
 				row.enabled = 1;
 				row.enabled = 1;
 
 
 				return proxyHostModel
 				return proxyHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Configure nginx
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row);
+						return internalNginx.configure(proxyHostModel, "proxy_host", row);
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -360,39 +370,40 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	disable: (access, data) => {
 	disable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 				}
 
 
 				row.enabled = 0;
 				row.enabled = 0;
 
 
 				return proxyHostModel
 				return proxyHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -409,40 +420,35 @@ const internalProxyHost = {
 	 * @param   {String}  [search_query]
 	 * @param   {String}  [search_query]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('proxy_hosts:list')
-			.then((access_data) => {
-				let query = proxyHostModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,access_list,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
-
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
-				}
+	getAll: async (access, expand, searchQuery) => {
+		const accessData = await access.can("proxy_hosts:list");
+		const query = proxyHostModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[owner,access_list,certificate]")
+			.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
-					});
-				}
+		if (accessData.permission_visibility !== "all") {
+			query.andWhere("owner_user_id", access.token.getUserId(1));
+		}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
+		// Query is used for searching
+		if (typeof searchQuery === "string" && searchQuery.length > 0) {
+			query.where(function () {
+				this.where(castJsonIfNeed("domain_names"), "like", `%${searchQuery}%`);
+			});
+		}
 
 
-				return query.then(utils.omitRows(omissions()));
-			})
-			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
-					return internalHost.cleanAllRowsCertificateMeta(rows);
-				}
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
 
 
-				return rows;
-			});
+		const rows = await query.then(utils.omitRows(omissions()));
+		if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
+			return internalHost.cleanAllRowsCertificateMeta(rows);
+		}
+		return rows;
 	},
 	},
 
 
 	/**
 	/**
@@ -453,20 +459,16 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getCount: (user_id, visibility) => {
 	getCount: (user_id, visibility) => {
-		let query = proxyHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
 
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 		}
 
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 };
 
 
-module.exports = internalProxyHost;
+export default internalProxyHost;
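Note: `getAll` is the one method in this file converted from a `.then()` chain to `async`/`await`, so the permission check, query construction and certificate-meta cleanup now read top to bottom. A condensed sketch of that shape; `can`, `listHosts` and `cleanRows` are hypothetical stand-ins, not the real Objection.js query or Access API:

```js
// Stand-ins only; the real code builds an Objection.js query against proxyHostModel.
const can = async (_permission) => ({ permission_visibility: "all" });
const listHosts = async (_visibility, _searchQuery) => [{ id: 1, domain_names: ["example.com"] }];
const cleanRows = (rows) => rows;

const getAll = async (access, expand, searchQuery) => {
	const accessData = await can("proxy_hosts:list");
	const rows = await listHosts(accessData.permission_visibility, searchQuery);
	if (expand && expand.indexOf("certificate") !== -1) {
		return cleanRows(rows);
	}
	return rows;
};

getAll({}, ["certificate"], "").then((rows) => console.log(rows.length)); // 1
```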

+ 195 - 182
backend/internal/redirection-host.js

@@ -1,73 +1,73 @@
-const _                    = require('lodash');
-const error                = require('../lib/error');
-const utils                = require('../lib/utils');
-const redirectionHostModel = require('../models/redirection_host');
-const internalHost         = require('./host');
-const internalNginx        = require('./nginx');
-const internalAuditLog     = require('./audit-log');
-const internalCertificate  = require('./certificate');
-const {castJsonIfNeed}     = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import redirectionHostModel from "../models/redirection_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 
 const internalRedirectionHost = {
 const internalRedirectionHost = {
-
 	/**
 	/**
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	create: (access, data) => {
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 		}
 
 
-		return access.can('redirection_hosts:create', data)
+		return access
+			.can("redirection_hosts:create", thisData)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 				});
 
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
 					});
+				});
 			})
 			})
 			.then(() => {
 			.then(() => {
 				// At this point the domains should have been checked
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 
 				// Fix for db field not having a default value
 				// Fix for db field not having a default value
 				// for this optional field.
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof data.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 				}
 
 
-				return redirectionHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
 							return internalRedirectionHost.update(access, {
 							return internalRedirectionHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 							});
 						})
 						})
 						.then(() => {
 						.then(() => {
@@ -79,27 +79,27 @@ const internalRedirectionHost = {
 			.then((row) => {
 			.then((row) => {
 				// re-fetch with cert
 				// re-fetch with cert
 				return internalRedirectionHost.get(access, {
 				return internalRedirectionHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Configure nginx
 				// Configure nginx
-				return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
+					return row;
+				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 
 				// Add to audit log
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'redirection-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "redirection-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 					.then(() => {
 						return row;
 						return row;
 					});
 					});
@@ -113,94 +113,107 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	update: (access, data) => {
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 		}
 
 
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
-
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
+				const domain_name_check_promises = [];
+
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
+						);
+						return true;
 					});
 					});
 
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
 						});
+					});
 				}
 				}
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: thisData.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 				}
 
 
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return row;
 							return row;
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 
 				return redirectionHostModel
 				return redirectionHostModel
 					.query()
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then((saved_row) => {
 					.then((saved_row) => {
 						// Add to audit log
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "redirection-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 							.then(() => {
 								return _.omit(saved_row, omissions());
 								return _.omit(saved_row, omissions());
 							});
 							});
 					});
 					});
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
+				return internalRedirectionHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate"],
+					})
 					.then((row) => {
 					.then((row) => {
 						// Configure nginx
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
+						return internalNginx
+							.configure(redirectionHostModel, "redirection_host", row)
 							.then((new_meta) => {
 							.then((new_meta) => {
 								row.meta = new_meta;
 								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
+								return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 							});
 							});
 					});
 					});
 			});
 			});
@@ -215,39 +228,39 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	get: (access, data) => {
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
 
-		return access.can('redirection_hosts:get', data.id)
+		return access
+			.can("redirection_hosts:get", thisData.id)
 			.then((access_data) => {
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 					.first();
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 				}
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRow(omissions()));
 				return query.then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
 				}
-				return row;
+				return thisRow;
 			});
 			});
 	},
 	},
 
 
@@ -259,35 +272,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	delete: (access, data) => {
 	delete: (access, data) => {
-		return access.can('redirection_hosts:delete', data.id)
+		return access
+			.can("redirection_hosts:delete", data.id)
 			.then(() => {
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 				}
 
 
 				return redirectionHostModel
 				return redirectionHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -304,39 +317,41 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	enable: (access, data) => {
 	enable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
 			.then(() => {
 				return internalRedirectionHost.get(access, {
 				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 				}
 
 
 				row.enabled = 1;
 				row.enabled = 1;
 
 
 				return redirectionHostModel
 				return redirectionHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Configure nginx
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
+						return internalNginx.configure(redirectionHostModel, "redirection_host", row);
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -353,39 +368,40 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	disable: (access, data) => {
 	disable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 				}
 
 
 				row.enabled = 0;
 				row.enabled = 0;
 
 
 				return redirectionHostModel
 				return redirectionHostModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -403,34 +419,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getAll: (access, expand, search_query) => {
 	getAll: (access, expand, search_query) => {
-		return access.can('redirection_hosts:list')
+		return access
+			.can("redirection_hosts:list")
 			.then((access_data) => {
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 				}
 
 
 				// Query is used for searching
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
 					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
 					});
 					});
 				}
 				}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRows(omissions()));
 				return query.then(utils.omitRows(omissions()));
 			})
 			})
 			.then((rows) => {
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 				}
 
 
@@ -446,20 +463,16 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getCount: (user_id, visibility) => {
 	getCount: (user_id, visibility) => {
-		let query = redirectionHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
 
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 		}
 
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 };
 
 
-module.exports = internalRedirectionHost;
+export default internalRedirectionHost;
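Note: two cosmetic changes repeat throughout these handlers: the error helper is imported as `errs` rather than `error` (avoiding a clash with the conventional `err` callback variable), and string concatenation gives way to template literals. A tiny illustration, with a hypothetical `ValidationError` standing in for ../lib/error.js:

```js
// Illustrative only; the real ../lib/error.js exports more error types.
class ValidationError extends Error {}
const errs = { ValidationError };

const hostname = "example.com";

// Before: throw new error.ValidationError(result.hostname + ' is already in use');
// After:
try {
	throw new errs.ValidationError(`${hostname} is already in use`);
} catch (err) {
	console.log(err.message); // "example.com is already in use"
}
```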

+ 17 - 18
backend/internal/report.js

@@ -1,38 +1,37 @@
-const internalProxyHost       = require('./proxy-host');
-const internalRedirectionHost = require('./redirection-host');
-const internalDeadHost        = require('./dead-host');
-const internalStream          = require('./stream');
+import internalDeadHost from "./dead-host.js";
+import internalProxyHost from "./proxy-host.js";
+import internalRedirectionHost from "./redirection-host.js";
+import internalStream from "./stream.js";
 
 
 const internalReport = {
 const internalReport = {
-
 	/**
 	/**
 	 * @param  {Access}   access
 	 * @param  {Access}   access
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	getHostsReport: (access) => {
 	getHostsReport: (access) => {
-		return access.can('reports:hosts', 1)
+		return access
+			.can("reports:hosts", 1)
 			.then((access_data) => {
 			.then((access_data) => {
-				let user_id = access.token.getUserId(1);
+				const userId = access.token.getUserId(1);
 
 
-				let promises = [
-					internalProxyHost.getCount(user_id, access_data.visibility),
-					internalRedirectionHost.getCount(user_id, access_data.visibility),
-					internalStream.getCount(user_id, access_data.visibility),
-					internalDeadHost.getCount(user_id, access_data.visibility)
+				const promises = [
+					internalProxyHost.getCount(userId, access_data.visibility),
+					internalRedirectionHost.getCount(userId, access_data.visibility),
+					internalStream.getCount(userId, access_data.visibility),
+					internalDeadHost.getCount(userId, access_data.visibility),
 				];
 				];
 
 
 				return Promise.all(promises);
 				return Promise.all(promises);
 			})
 			})
 			.then((counts) => {
 			.then((counts) => {
 				return {
 				return {
-					proxy:       counts.shift(),
+					proxy: counts.shift(),
 					redirection: counts.shift(),
 					redirection: counts.shift(),
-					stream:      counts.shift(),
-					dead:        counts.shift()
+					stream: counts.shift(),
+					dead: counts.shift(),
 				};
 				};
 			});
 			});
-
-	}
+	},
 };
 };
 
 
-module.exports = internalReport;
+export default internalReport;
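Note: `getHostsReport` still collects the four totals with `Promise.all` and pops them off in order with `counts.shift()`, so the ordering of the promise array is significant. An equivalent shape, shown here only as a sketch and not what the commit does, is to destructure the resolved array; the `getCount` stub stands in for the internal `*.getCount(userId, visibility)` calls:

```js
// Sketch: same result as the counts.shift() version, via array destructuring.
const getCount = async (n) => n;

const getHostsReport = async () => {
	const [proxy, redirection, stream, dead] = await Promise.all([
		getCount(3),
		getCount(1),
		getCount(0),
		getCount(2),
	]);
	return { proxy, redirection, stream, dead };
};

getHostsReport().then((report) => console.log(report)); // { proxy: 3, redirection: 1, stream: 0, dead: 2 }
```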

+ 35 - 43
backend/internal/setting.js

@@ -1,10 +1,9 @@
-const fs            = require('fs');
-const error         = require('../lib/error');
-const settingModel  = require('../models/setting');
-const internalNginx = require('./nginx');
+import fs from "node:fs";
+import errs from "../lib/error.js";
+import settingModel from "../models/setting.js";
+import internalNginx from "./nginx.js";
 
 
 const internalSetting = {
 const internalSetting = {
-
 	/**
 	/**
 	 * @param  {Access}  access
 	 * @param  {Access}  access
 	 * @param  {Object}  data
 	 * @param  {Object}  data
@@ -12,37 +11,38 @@ const internalSetting = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	update: (access, data) => {
 	update: (access, data) => {
-		return access.can('settings:update', data.id)
+		return access
+			.can("settings:update", data.id)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
-				return internalSetting.get(access, {id: data.id});
+				return internalSetting.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (row.id !== data.id) {
 				if (row.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+					);
 				}
 				}
 
 
-				return settingModel
-					.query()
-					.where({id: data.id})
-					.patch(data);
+				return settingModel.query().where({ id: data.id }).patch(data);
 			})
 			})
 			.then(() => {
 			.then(() => {
 				return internalSetting.get(access, {
 				return internalSetting.get(access, {
-					id: data.id
+					id: data.id,
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (row.id === 'default-site') {
+				if (row.id === "default-site") {
 					// write the html if we need to
 					// write the html if we need to
-					if (row.value === 'html') {
-						fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
+					if (row.value === "html") {
+						fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
 					}
 					}
 
 
 					// Configure nginx
 					// Configure nginx
-					return internalNginx.deleteConfig('default')
+					return internalNginx
+						.deleteConfig("default")
 						.then(() => {
 						.then(() => {
-							return internalNginx.generateConfig('default', row);
+							return internalNginx.generateConfig("default", row);
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return internalNginx.test();
 							return internalNginx.test();
@@ -54,7 +54,8 @@ const internalSetting = {
 							return row;
 							return row;
 						})
 						})
 						.catch((/*err*/) => {
 						.catch((/*err*/) => {
-							internalNginx.deleteConfig('default')
+							internalNginx
+								.deleteConfig("default")
 								.then(() => {
 								.then(() => {
 									return internalNginx.test();
 									return internalNginx.test();
 								})
 								})
@@ -63,12 +64,11 @@ const internalSetting = {
 								})
 								})
 								.then(() => {
 								.then(() => {
 									// I'm being slack here I know..
 									// I'm being slack here I know..
-									throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
+									throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
 								});
 								});
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			});
 			});
 	},
 	},
 
 
@@ -79,19 +79,16 @@ const internalSetting = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	get: (access, data) => {
 	get: (access, data) => {
-		return access.can('settings:get', data.id)
+		return access
+			.can("settings:get", data.id)
 			.then(() => {
 			.then(() => {
-				return settingModel
-					.query()
-					.where('id', data.id)
-					.first();
+				return settingModel.query().where("id", data.id).first();
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (row) {
 				if (row) {
 					return row;
 					return row;
-				} else {
-					throw new error.ItemNotFoundError(data.id);
 				}
 				}
+				throw new errs.ItemNotFoundError(data.id);
 			});
 			});
 	},
 	},
 
 
@@ -102,15 +99,13 @@ const internalSetting = {
 	 * @returns {*}
 	 * @returns {*}
 	 */
 	 */
 	getCount: (access) => {
 	getCount: (access) => {
-		return access.can('settings:list')
+		return access
+			.can("settings:list")
 			.then(() => {
 			.then(() => {
-				return settingModel
-					.query()
-					.count('id as count')
-					.first();
+				return settingModel.query().count("id as count").first();
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 			});
 	},
 	},
 
 
@@ -121,13 +116,10 @@ const internalSetting = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getAll: (access) => {
 	getAll: (access) => {
-		return access.can('settings:list')
-			.then(() => {
-				return settingModel
-					.query()
-					.orderBy('description', 'ASC');
-			});
-	}
+		return access.can("settings:list").then(() => {
+			return settingModel.query().orderBy("description", "ASC");
+		});
+	},
 };
 };
 
 
-module.exports = internalSetting;
+export default internalSetting;
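As the rewritten update() shows, saving the default-site setting optionally writes /data/nginx/default_www/index.html (when value is "html"), then deletes and regenerates the default nginx config, tests it and reloads, falling back to a plain delete-and-reload when the test fails. A hedged example call; the access object and the meta payload are assumptions:

import internalSetting from "./internal/setting.js";

// "access" is a hypothetical Access instance taken from the request token
const row = await internalSetting.update(access, {
	id: "default-site",
	value: "html",
	meta: { html: "<h1>Placeholder page</h1>" }, // illustrative content
});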

+ 156 - 153
backend/internal/stream.js

@@ -1,88 +1,85 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const streamModel         = require('../models/stream');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const internalHost        = require('./host');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted', 'certificate.is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import streamModel from "../models/stream.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
+};
 
 
 const internalStream = {
 const internalStream = {
-
 	/**
 	/**
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	create: (access, data) => {
 	create: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		const create_certificate = data.certificate_id === "new";
 
 
 		if (create_certificate) {
 		if (create_certificate) {
 			delete data.certificate_id;
 			delete data.certificate_id;
 		}
 		}
 
 
-		return access.can('streams:create', data)
+		return access
+			.can("streams:create", data)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
 				// TODO: At this point the existing ports should have been checked
 				// TODO: At this point the existing ports should have been checked
 				data.owner_user_id = access.token.getUserId(1);
 				data.owner_user_id = access.token.getUserId(1);
 
 
-				if (typeof data.meta === 'undefined') {
+				if (typeof data.meta === "undefined") {
 					data.meta = {};
 					data.meta = {};
 				}
 				}
 
 
 				// streams aren't routed by domain name so don't store domain names in the DB
 				// streams aren't routed by domain name so don't store domain names in the DB
-				let data_no_domains = structuredClone(data);
+				const data_no_domains = structuredClone(data);
 				delete data_no_domains.domain_names;
 				delete data_no_domains.domain_names;
 
 
-				return streamModel
-					.query()
-					.insertAndFetch(data_no_domains)
-					.then(utils.omitRow(omissions()));
+				return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (create_certificate) {
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+					return internalCertificate
+						.createQuickCertificate(access, data)
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
 							return internalStream.update(access, {
 							return internalStream.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 							});
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return row;
 							return row;
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// re-fetch with cert
 				// re-fetch with cert
 				return internalStream.get(access, {
 				return internalStream.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Configure nginx
 				// Configure nginx
-				return internalNginx.configure(streamModel, 'stream', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(streamModel, "stream", row).then(() => {
+					return row;
+				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Add to audit log
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'stream',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "stream",
+						object_id: row.id,
+						meta: data,
+					})
 					.then(() => {
 					.then(() => {
 						return row;
 						return row;
 					});
 					});
@@ -96,72 +93,78 @@ const internalStream = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	update: (access, data) => {
 	update: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 
 		if (create_certificate) {
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 		}
 
 
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", thisData.id)
 			.then((/*access_data*/) => {
 			.then((/*access_data*/) => {
 				// TODO: at this point the existing streams should have been checked
 				// TODO: at this point the existing streams should have been checked
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: thisData.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 				}
 
 
 				if (create_certificate) {
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 						.then((cert) => {
 							// update host with cert id
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						})
 						.then(() => {
 						.then(() => {
 							return row;
 							return row;
 						});
 						});
-				} else {
-					return row;
 				}
 				}
+				return row;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
 
 				return streamModel
 				return streamModel
 					.query()
 					.query()
-					.patchAndFetchById(row.id, data)
+					.patchAndFetchById(row.id, thisData)
 					.then(utils.omitRow(omissions()))
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 					.then((saved_row) => {
 						// Add to audit log
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "stream",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 							.then(() => {
 								return saved_row;
 								return saved_row;
 							});
 							});
 					});
 					});
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalStream.get(access, {id: data.id, expand: ['owner', 'certificate']})
-					.then((row) => {
-						return internalNginx.configure(streamModel, 'stream', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+				return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
+					return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
+						row.meta = new_meta;
+						return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 					});
 					});
+				});
 			});
 			});
 	},
 	},
 
 
@@ -174,39 +177,39 @@ const internalStream = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	get: (access, data) => {
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
 
-		return access.can('streams:get', data.id)
+		return access
+			.can("streams:get", thisData.id)
 			.then((access_data) => {
 			.then((access_data) => {
-				let query = streamModel
+				const query = streamModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 					.first();
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 				}
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRow(omissions()));
 				return query.then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
 				}
-				return row;
+				return thisRow;
 			});
 			});
 	},
 	},
 
 
@@ -218,35 +221,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	delete: (access, data) => {
 	delete: (access, data) => {
-		return access.can('streams:delete', data.id)
+		return access
+			.can("streams:delete", data.id)
 			.then(() => {
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 				}
 
 
 				return streamModel
 				return streamModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -263,39 +266,41 @@ const internalStream = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	enable: (access, data) => {
 	enable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
 			.then(() => {
 				return internalStream.get(access, {
 				return internalStream.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 				});
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Stream is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Stream is already enabled");
 				}
 				}
 
 
 				row.enabled = 1;
 				row.enabled = 1;
 
 
 				return streamModel
 				return streamModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Configure nginx
 						// Configure nginx
-						return internalNginx.configure(streamModel, 'stream', row);
+						return internalNginx.configure(streamModel, "stream", row);
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -312,39 +317,40 @@ const internalStream = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	disable: (access, data) => {
 	disable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Stream is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Stream is already disabled");
 				}
 				}
 
 
 				row.enabled = 0;
 				row.enabled = 0;
 
 
 				return streamModel
 				return streamModel
 					.query()
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Delete Nginx Config
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'stream-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -362,34 +368,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getAll: (access, expand, search_query) => {
 	getAll: (access, expand, search_query) => {
-		return access.can('streams:list')
+		return access
+			.can("streams:list")
 			.then((access_data) => {
 			.then((access_data) => {
 				const query = streamModel
 				const query = streamModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy('incoming_port', 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy("incoming_port", "ASC");
 
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 				}
 
 
 				// Query is used for searching
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
 					query.where(function () {
-						this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
 					});
 					});
 				}
 				}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRows(omissions()));
 				return query.then(utils.omitRows(omissions()));
 			})
 			})
 			.then((rows) => {
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 				}
 
 
@@ -405,20 +412,16 @@ const internalStream = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	getCount: (user_id, visibility) => {
 	getCount: (user_id, visibility) => {
-		const query = streamModel
-			.query()
-			.count('id AS count')
-			.where('is_deleted', 0);
+		const query = streamModel.query().count("id AS count").where("is_deleted", 0);
 
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 		}
 
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 };
 
 
-module.exports = internalStream;
+export default internalStream;
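Worth noting in create(): passing certificate_id: "new" makes the stream insert first, then mint a certificate via createQuickCertificate() and patch the row with the resulting id, before the row is re-fetched with the certificate and owner graphs and the nginx config is written. A hedged caller sketch; every field value below is illustrative:

import internalStream from "./internal/stream.js";

const stream = await internalStream.create(access, {
	incoming_port: 2222,
	forwarding_host: "10.0.0.5",
	forwarding_port: 22,
	certificate_id: "new", // triggers createQuickCertificate()
	domain_names: ["stream.example.com"],
	meta: { letsencrypt_email: "admin@example.com", letsencrypt_agree: true },
});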

+ 108 - 116
backend/internal/token.js

@@ -1,14 +1,14 @@
-const _          = require('lodash');
-const error      = require('../lib/error');
-const userModel  = require('../models/user');
-const authModel  = require('../models/auth');
-const helpers    = require('../lib/helpers');
-const TokenModel = require('../models/token');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { parseDatePeriod } from "../lib/helpers.js";
+import authModel from "../models/auth.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
 
 
-const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password';
-
-module.exports = {
+const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
+const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
 
 
+export default {
 	/**
 	/**
 	 * @param   {Object} data
 	 * @param   {Object} data
 	 * @param   {String} data.identity
 	 * @param   {String} data.identity
@@ -18,70 +18,66 @@ module.exports = {
 	 * @param   {String} [issuer]
 	 * @param   {String} [issuer]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getTokenFromEmail: (data, issuer) => {
-		let Token = new TokenModel();
+	getTokenFromEmail: async (data, issuer) => {
+		const Token = TokenModel();
 
 
-		data.scope  = data.scope || 'user';
-		data.expiry = data.expiry || '1d';
+		data.scope = data.scope || "user";
+		data.expiry = data.expiry || "1d";
 
 
-		return userModel
+		const user = await userModel
 			.query()
 			.query()
-			.where('email', data.identity.toLowerCase().trim())
-			.andWhere('is_deleted', 0)
-			.andWhere('is_disabled', 0)
-			.first()
-			.then((user) => {
-				if (user) {
-					// Get auth
-					return authModel
-						.query()
-						.where('user_id', '=', user.id)
-						.where('type', '=', 'password')
-						.first()
-						.then((auth) => {
-							if (auth) {
-								return auth.verifyPassword(data.secret)
-									.then((valid) => {
-										if (valid) {
-
-											if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
-												// The scope requested doesn't exist as a role against the user,
-												// you shall not pass.
-												throw new error.AuthError('Invalid scope: ' + data.scope);
-											}
-
-											// Create a moment of the expiry expression
-											let expiry = helpers.parseDatePeriod(data.expiry);
-											if (expiry === null) {
-												throw new error.AuthError('Invalid expiry time: ' + data.expiry);
-											}
-
-											return Token.create({
-												iss:   issuer || 'api',
-												attrs: {
-													id: user.id
-												},
-												scope:     [data.scope],
-												expiresIn: data.expiry
-											})
-												.then((signed) => {
-													return {
-														token:   signed.token,
-														expires: expiry.toISOString()
-													};
-												});
-										} else {
-											throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-										}
-									});
-							} else {
-								throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-							}
-						});
-				} else {
-					throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
-				}
-			});
+			.where("email", data.identity.toLowerCase().trim())
+			.andWhere("is_deleted", 0)
+			.andWhere("is_disabled", 0)
+			.first();
+
+		if (!user) {
+			throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+		}
+
+		const auth = await authModel
+			.query()
+			.where("user_id", "=", user.id)
+			.where("type", "=", "password")
+			.first();
+
+		if (!auth) {
+			throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+		}
+
+		const valid = await auth.verifyPassword(data.secret);
+		if (!valid) {
+			throw new errs.AuthError(
+				ERROR_MESSAGE_INVALID_AUTH,
+				ERROR_MESSAGE_INVALID_AUTH_I18N,
+			);
+		}
+
+		if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
+			// The scope requested doesn't exist as a role against the user,
+			// you shall not pass.
+			throw new errs.AuthError(`Invalid scope: ${data.scope}`);
+		}
+
+		// Create a moment of the expiry expression
+		const expiry = parseDatePeriod(data.expiry);
+		if (expiry === null) {
+			throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
+		}
+
+		const signed = await Token.create({
+			iss: issuer || "api",
+			attrs: {
+				id: user.id,
+			},
+			scope: [data.scope],
+			expiresIn: data.expiry,
+		});
+
+		return {
+			token: signed.token,
+			expires: expiry.toISOString(),
+		};
 	},
 	},
 
 
 	/**
 	/**
@@ -91,74 +87,70 @@ module.exports = {
 	 * @param {String} [data.scope]   Only considered if existing token scope is admin
 	 * @param {String} [data.scope]   Only considered if existing token scope is admin
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getFreshToken: (access, data) => {
-		let Token = new TokenModel();
+	getFreshToken: async (access, data) => {
+		const Token = TokenModel();
+		const thisData = data || {};
 
 
-		data        = data || {};
-		data.expiry = data.expiry || '1d';
-
-		if (access && access.token.getUserId(0)) {
+		thisData.expiry = thisData.expiry || "1d";
 
 
+		if (access?.token.getUserId(0)) {
 			// Create a moment of the expiry expression
 			// Create a moment of the expiry expression
-			let expiry = helpers.parseDatePeriod(data.expiry);
+			const expiry = parseDatePeriod(thisData.expiry);
 			if (expiry === null) {
 			if (expiry === null) {
-				throw new error.AuthError('Invalid expiry time: ' + data.expiry);
+				throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
 			}
 			}
 
 
-			let token_attrs = {
-				id: access.token.getUserId(0)
+			const token_attrs = {
+				id: access.token.getUserId(0),
 			};
 			};
 
 
 			// Only admins can request otherwise scoped tokens
 			// Only admins can request otherwise scoped tokens
-			let scope = access.token.get('scope');
-			if (data.scope && access.token.hasScope('admin')) {
-				scope = [data.scope];
+			let scope = access.token.get("scope");
+			if (thisData.scope && access.token.hasScope("admin")) {
+				scope = [thisData.scope];
 
 
-				if (data.scope === 'job-board' || data.scope === 'worker') {
+				if (thisData.scope === "job-board" || thisData.scope === "worker") {
 					token_attrs.id = 0;
 					token_attrs.id = 0;
 				}
 				}
 			}
 			}
 
 
-			return Token.create({
-				iss:       'api',
-				scope:     scope,
-				attrs:     token_attrs,
-				expiresIn: data.expiry
-			})
-				.then((signed) => {
-					return {
-						token:   signed.token,
-						expires: expiry.toISOString()
-					};
-				});
-		} else {
-			throw new error.AssertionFailedError('Existing token contained invalid user data');
+			const signed = await Token.create({
+				iss: "api",
+				scope: scope,
+				attrs: token_attrs,
+				expiresIn: thisData.expiry,
+			});
+
+			return {
+				token: signed.token,
+				expires: expiry.toISOString(),
+			};
 		}
 		}
+		throw new errs.AssertionFailedError("Existing token contained invalid user data");
 	},
 	},
 
 
 	/**
 	/**
 	 * @param   {Object} user
 	 * @param   {Object} user
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getTokenFromUser: (user) => {
-		const expire = '1d';
-		const Token  = new TokenModel();
-		const expiry = helpers.parseDatePeriod(expire);
+	getTokenFromUser: async (user) => {
+		const expire = "1d";
+		const Token = TokenModel();
+		const expiry = parseDatePeriod(expire);
 
 
-		return Token.create({
-			iss:   'api',
+		const signed = await Token.create({
+			iss: "api",
 			attrs: {
 			attrs: {
-				id: user.id
+				id: user.id,
 			},
 			},
-			scope:     ['user'],
-			expiresIn: expire
-		})
-			.then((signed) => {
-				return {
-					token:   signed.token,
-					expires: expiry.toISOString(),
-					user:    user
-				};
-			});
-	}
+			scope: ["user"],
+			expiresIn: expire,
+		});
+
+		return {
+			token: signed.token,
+			expires: expiry.toISOString(),
+			user: user,
+		};
+	},
 };
 };
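This module is the first in the changeset converted from nested .then() chains to async/await with early-throw guard clauses, which is why the three failure cases in getTokenFromEmail (unknown user, missing password auth row, wrong password) now read top to bottom and all surface the same ERROR_MESSAGE_INVALID_AUTH. The same shape, reduced to a sketch with hypothetical helpers:

// illustrative only; findUser/findPasswordAuth stand in for the queries above
const authenticate = async (identity, secret) => {
	const user = await findUser(identity);
	if (!user) throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);

	const auth = await findPasswordAuth(user.id);
	if (!auth) throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);

	const valid = await auth.verifyPassword(secret);
	if (!valid) throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);

	return user;
};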

+ 222 - 241
backend/internal/user.js

@@ -1,93 +1,76 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const userModel           = require('../models/user');
-const userPermissionModel = require('../models/user_permission');
-const authModel           = require('../models/auth');
-const gravatar            = require('gravatar');
-const internalToken       = require('./token');
-const internalAuditLog    = require('./audit-log');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import gravatar from "gravatar";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import authModel from "../models/auth.js";
+import userModel from "../models/user.js";
+import userPermissionModel from "../models/user_permission.js";
+import internalAuditLog from "./audit-log.js";
+import internalToken from "./token.js";
+
+const omissions = () => {
+	return ["is_deleted", "permissions.id", "permissions.user_id", "permissions.created_on", "permissions.modified_on"];
+};
 
 
-const internalUser = {
+const DEFAULT_AVATAR = gravatar.url("[email protected]", { default: "mm" });
 
 
+const internalUser = {
 	/**
 	/**
+	 * Creating a user can happen unauthenticated only once and only when no active users exist.
+	 * Otherwise, a valid auth method is required.
+	 *
 	 * @param   {Access}  access
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	create: (access, data) => {
-		let auth = data.auth || null;
+	create: async (access, data) => {
+		const auth = data.auth || null;
 		delete data.auth;
 		delete data.auth;
 
 
-		data.avatar = data.avatar || '';
-		data.roles  = data.roles || [];
+		data.avatar = data.avatar || "";
+		data.roles = data.roles || [];
 
 
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 		}
 
 
-		return access.can('users:create', data)
-			.then(() => {
-				data.avatar = gravatar.url(data.email, {default: 'mm'});
-
-				return userModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
-			})
-			.then((user) => {
-				if (auth) {
-					return authModel
-						.query()
-						.insert({
-							user_id: user.id,
-							type:    auth.type,
-							secret:  auth.secret,
-							meta:    {}
-						})
-						.then(() => {
-							return user;
-						});
-				} else {
-					return user;
-				}
-			})
-			.then((user) => {
-				// Create permissions row as well
-				let is_admin = data.roles.indexOf('admin') !== -1;
+		await access.can("users:create", data);
+		data.avatar = gravatar.url(data.email, { default: "mm" });
 
 
-				return userPermissionModel
-					.query()
-					.insert({
-						user_id:           user.id,
-						visibility:        is_admin ? 'all' : 'user',
-						proxy_hosts:       'manage',
-						redirection_hosts: 'manage',
-						dead_hosts:        'manage',
-						streams:           'manage',
-						access_lists:      'manage',
-						certificates:      'manage'
-					})
-					.then(() => {
-						return internalUser.get(access, {id: user.id, expand: ['permissions']});
-					});
-			})
-			.then((user) => {
-				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        user
-				})
-					.then(() => {
-						return user;
-					});
+		let user = await userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
+		if (auth) {
+			await authModel.query().insert({
+				user_id: user.id,
+				type: auth.type,
+				secret: auth.secret,
+				meta: {},
 			});
 			});
+		}
+
+		// Create permissions row as well
+		const isAdmin = data.roles.indexOf("admin") !== -1;
+
+		await userPermissionModel.query().insert({
+			user_id: user.id,
+			visibility: isAdmin ? "all" : "user",
+			proxy_hosts: "manage",
+			redirection_hosts: "manage",
+			dead_hosts: "manage",
+			streams: "manage",
+			access_lists: "manage",
+			certificates: "manage",
+		});
+
+		user = await internalUser.get(access, { id: user.id, expand: ["permissions"] });
+
+		await internalAuditLog.add(access, {
+			action: "created",
+			object_type: "user",
+			object_id: user.id,
+			meta: user,
+		});
+
+		return user;
 	},
 	},
 
 
 	/**
 	/**
@@ -99,62 +82,57 @@ const internalUser = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	update: (access, data) => {
 	update: (access, data) => {
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 		}
 
 
-		return access.can('users:update', data.id)
+		return access
+			.can("users:update", data.id)
 			.then(() => {
 			.then(() => {
-
 				// Make sure that the user being updated doesn't change their email to another user that is already using it
 				// Make sure that the user being updated doesn't change their email to another user that is already using it
 				// 1. get user we want to update
 				// 1. get user we want to update
-				return internalUser.get(access, {id: data.id})
-					.then((user) => {
-
-						// 2. if email is to be changed, find other users with that email
-						if (typeof data.email !== 'undefined') {
-							data.email = data.email.toLowerCase().trim();
-
-							if (user.email !== data.email) {
-								return internalUser.isEmailAvailable(data.email, data.id)
-									.then((available) => {
-										if (!available) {
-											throw new error.ValidationError('Email address already in use - ' + data.email);
-										}
-
-										return user;
-									});
-							}
+				return internalUser.get(access, { id: data.id }).then((user) => {
+					// 2. if email is to be changed, find other users with that email
+					if (typeof data.email !== "undefined") {
+						data.email = data.email.toLowerCase().trim();
+
+						if (user.email !== data.email) {
+							return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
+								if (!available) {
+									throw new errs.ValidationError(`Email address already in use - ${data.email}`);
+								}
+								return user;
+							});
 						}
 						}
+					}
 
 
-						// No change to email:
-						return user;
-					});
+					// No change to email:
+					return user;
+				});
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				if (user.id !== data.id) {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 				}
 
 
-				data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
-
-				return userModel
-					.query()
-					.patchAndFetchById(user.id, data)
-					.then(utils.omitRow(omissions()));
+				data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
+				return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
 			})
 			})
 			.then(() => {
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				// Add to audit log
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "updated",
+						object_type: "user",
+						object_id: user.id,
+						meta: { ...data, id: user.id, name: user.name },
+					})
 					.then(() => {
 					.then(() => {
 						return user;
 						return user;
 					});
 					});
@@ -170,37 +148,41 @@ const internalUser = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	get: (access, data) => {
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
 
-		if (typeof data.id === 'undefined' || !data.id) {
-			data.id = access.token.getUserId(0);
+		if (typeof thisData.id === "undefined" || !thisData.id) {
+			thisData.id = access.token.getUserId(0);
 		}
 		}
 
 
-		return access.can('users:get', data.id)
+		return access
+			.can("users:get", thisData.id)
 			.then(() => {
 			.then(() => {
-				let query = userModel
+				const query = userModel
 					.query()
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[permissions]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[permissions]")
 					.first();
 					.first();
 
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 				}
 
 
 				return query.then(utils.omitRow(omissions()));
 				return query.then(utils.omitRow(omissions()));
 			})
 			})
 			.then((row) => {
 			.then((row) => {
 				if (!row || !row.id) {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				}
 				// Custom omissions
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(row, thisData.omit);
+				}
+
+				if (row.avatar === "") {
+					row.avatar = DEFAULT_AVATAR;
 				}
 				}
+
 				return row;
 				return row;
 			});
 			});
 	},
 	},
@@ -213,20 +195,15 @@ const internalUser = {
 	 * @param user_id
 	 * @param user_id
 	 */
 	 */
 	isEmailAvailable: (email, user_id) => {
 	isEmailAvailable: (email, user_id) => {
-		let query = userModel
-			.query()
-			.where('email', '=', email.toLowerCase().trim())
-			.where('is_deleted', 0)
-			.first();
+		const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
 
 
-		if (typeof user_id !== 'undefined') {
-			query.where('id', '!=', user_id);
+		if (typeof user_id !== "undefined") {
+			query.where("id", "!=", user_id);
 		}
 		}
 
 
-		return query
-			.then((user) => {
-				return !user;
-			});
+		return query.then((user) => {
+			return !user;
+		});
 	},
 	},
 
 
 	/**
 	/**
@@ -237,33 +214,34 @@ const internalUser = {
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
 	delete: (access, data) => {
 	delete: (access, data) => {
-		return access.can('users:delete', data.id)
+		return access
+			.can("users:delete", data.id)
 			.then(() => {
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				if (!user) {
 				if (!user) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 				}
 
 
 				// Make sure user can't delete themselves
 				// Make sure user can't delete themselves
 				if (user.id === access.token.getUserId(0)) {
 				if (user.id === access.token.getUserId(0)) {
-					throw new error.PermissionError('You cannot delete yourself.');
+					throw new errs.PermissionError("You cannot delete yourself.");
 				}
 				}
 
 
 				return userModel
 				return userModel
 					.query()
 					.query()
-					.where('id', user.id)
+					.where("id", user.id)
 					.patch({
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to audit log
 						// Add to audit log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        _.omit(user, omissions())
+							action: "deleted",
+							object_type: "user",
+							object_id: user.id,
+							meta: _.omit(user, omissions()),
 						});
 						});
 					});
 					});
 			})
 			})
@@ -272,6 +250,14 @@ const internalUser = {
 			});
 			});
 	},
 	},
 
 
+	deleteAll: async () => {
+		await userModel
+			.query()
+			.patch({
+				is_deleted: 1,
+			});
+	},
+
 	/**
 	/**
 	 * This will only count the users
 	 * This will only count the users
 	 *
 	 *
@@ -280,26 +266,26 @@ const internalUser = {
 	 * @returns {*}
 	 * @returns {*}
 	 */
 	 */
 	getCount: (access, search_query) => {
 	getCount: (access, search_query) => {
-		return access.can('users:list')
+		return access
+			.can("users:list")
 			.then(() => {
 			.then(() => {
-				let query = userModel
-					.query()
-					.count('id as count')
-					.where('is_deleted', 0)
-					.first();
+				const query = userModel.query().count("id as count").where("is_deleted", 0).first();
 
 
 				// Query is used for searching
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === "string") {
 					query.where(function () {
 					query.where(function () {
-						this.where('user.name', 'like', '%' + search_query + '%')
-							.orWhere('user.email', 'like', '%' + search_query + '%');
+						this.where("user.name", "like", `%${search_query}%`).orWhere(
+							"user.email",
+							"like",
+							`%${search_query}%`,
+						);
 					});
 					});
 				}
 				}
 
 
 				return query;
 				return query;
 			})
 			})
 			.then((row) => {
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 			});
 	},
 	},
 
 
@@ -311,30 +297,28 @@ const internalUser = {
 	 * @param   {String}  [search_query]
 	 * @param   {String}  [search_query]
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	getAll: (access, expand, search_query) => {
-		return access.can('users:list')
-			.then(() => {
-				let query = userModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[permissions]')
-					.orderBy('name', 'ASC');
-
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('name', 'like', '%' + search_query + '%')
-							.orWhere('email', 'like', '%' + search_query + '%');
-					});
-				}
+	getAll: async (access, expand, search_query) => {
+		await access.can("users:list");
+		const query = userModel
+			.query()
+			.where("is_deleted", 0)
+			.groupBy("id")
+			.allowGraph("[permissions]")
+			.orderBy("name", "ASC");
+
+		// Query is used for searching
+		if (typeof search_query === "string") {
+			query.where(function () {
+				this.where("name", "like", `%${search_query}%`).orWhere("email", "like", `%${search_query}%`);
+			});
+		}
 
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
+		if (typeof expand !== "undefined" && expand !== null) {
+			query.withGraphFetched(`[${expand.join(", ")}]`);
+		}
 
 
-				return query.then(utils.omitRows(omissions()));
-			});
+		const res = await query;
+		return utils.omitRows(omissions())(res);
 	},
 	},
 
 
 	/**
 	/**
@@ -342,11 +326,11 @@ const internalUser = {
 	 * @param   {Integer} [id_requested]
 	 * @param   {Integer} [id_requested]
 	 * @returns {[String]}
 	 * @returns {[String]}
 	 */
 	 */
-	getUserOmisionsByAccess: (access, id_requested) => {
+	getUserOmisionsByAccess: (access, idRequested) => {
 		let response = []; // Admin response
 		let response = []; // Admin response
 
 
-		if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
-			response = ['roles', 'is_deleted']; // Restricted response
+		if (!access.token.hasScope("admin") && access.token.getUserId(0) !== idRequested) {
+			response = ["is_deleted"]; // Restricted response
 		}
 		}
 
 
 		return response;
 		return response;
@@ -361,26 +345,30 @@ const internalUser = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	setPassword: (access, data) => {
 	setPassword: (access, data) => {
-		return access.can('users:password', data.id)
+		return access
+			.can("users:password", data.id)
 			.then(() => {
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				if (user.id !== data.id) {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 				}
 
 
 				if (user.id === access.token.getUserId(0)) {
 				if (user.id === access.token.getUserId(0)) {
 					// they're setting their own password. Make sure their current password is correct
 					// they're setting their own password. Make sure their current password is correct
-					if (typeof data.current === 'undefined' || !data.current) {
-						throw new error.ValidationError('Current password was not supplied');
+					if (typeof data.current === "undefined" || !data.current) {
+						throw new errs.ValidationError("Current password was not supplied");
 					}
 					}
 
 
-					return internalToken.getTokenFromEmail({
-						identity: user.email,
-						secret:   data.current
-					})
+					return internalToken
+						.getTokenFromEmail({
+							identity: user.email,
+							secret: data.current,
+						})
 						.then(() => {
 						.then(() => {
 							return user;
 							return user;
 						});
 						});
@@ -392,43 +380,36 @@ const internalUser = {
 				// Get auth, patch if it exists
 				// Get auth, patch if it exists
 				return authModel
 				return authModel
 					.query()
 					.query()
-					.where('user_id', user.id)
-					.andWhere('type', data.type)
+					.where("user_id", user.id)
+					.andWhere("type", data.type)
 					.first()
 					.first()
 					.then((existing_auth) => {
 					.then((existing_auth) => {
 						if (existing_auth) {
 						if (existing_auth) {
 							// patch
 							// patch
-							return authModel
-								.query()
-								.where('user_id', user.id)
-								.andWhere('type', data.type)
-								.patch({
-									type:   data.type, // This is required for the model to encrypt on save
-									secret: data.secret
-								});
-						} else {
-							// insert
-							return authModel
-								.query()
-								.insert({
-									user_id: user.id,
-									type:    data.type,
-									secret:  data.secret,
-									meta:    {}
-								});
+							return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
+								type: data.type, // This is required for the model to encrypt on save
+								secret: data.secret,
+							});
 						}
 						}
+						// insert
+						return authModel.query().insert({
+							user_id: user.id,
+							type: data.type,
+							secret: data.secret,
+							meta: {},
+						});
 					})
 					})
 					.then(() => {
 					.then(() => {
 						// Add to Audit Log
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:             user.name,
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
 								password_changed: true,
 								password_changed: true,
-								auth_type:        data.type
-							}
+								auth_type: data.type,
+							},
 						});
 						});
 					});
 					});
 			})
 			})
@@ -443,14 +424,17 @@ const internalUser = {
 	 * @return {Promise}
 	 * @return {Promise}
 	 */
 	 */
 	setPermissions: (access, data) => {
 	setPermissions: (access, data) => {
-		return access.can('users:permissions', data.id)
+		return access
+			.can("users:permissions", data.id)
 			.then(() => {
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				if (user.id !== data.id) {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 				}
 
 
 				return user;
 				return user;
@@ -459,34 +443,30 @@ const internalUser = {
 				// Get perms row, patch if it exists
 				// Get perms row, patch if it exists
 				return userPermissionModel
 				return userPermissionModel
 					.query()
 					.query()
-					.where('user_id', user.id)
+					.where("user_id", user.id)
 					.first()
 					.first()
 					.then((existing_auth) => {
 					.then((existing_auth) => {
 						if (existing_auth) {
 						if (existing_auth) {
 							// patch
 							// patch
 							return userPermissionModel
 							return userPermissionModel
 								.query()
 								.query()
-								.where('user_id', user.id)
-								.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
-						} else {
-							// insert
-							return userPermissionModel
-								.query()
-								.insertAndFetch(_.assign({user_id: user.id}, data));
+								.where("user_id", user.id)
+								.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
 						}
 						}
+						// insert
+						return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
 					})
 					})
 					.then((permissions) => {
 					.then((permissions) => {
 						// Add to Audit Log
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:        user.name,
-								permissions: permissions
-							}
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
+								permissions: permissions,
+							},
 						});
 						});
-
 					});
 					});
 			})
 			})
 			.then(() => {
 			.then(() => {
@@ -500,14 +480,15 @@ const internalUser = {
 	 * @param {Integer}  data.id
 	 * @param {Integer}  data.id
 	 */
 	 */
 	loginAs: (access, data) => {
 	loginAs: (access, data) => {
-		return access.can('users:loginas', data.id)
+		return access
+			.can("users:loginas", data.id)
 			.then(() => {
 			.then(() => {
 				return internalUser.get(access, data);
 				return internalUser.get(access, data);
 			})
 			})
 			.then((user) => {
 			.then((user) => {
 				return internalToken.getTokenFromUser(user);
 				return internalToken.getTokenFromUser(user);
 			});
 			});
-	}
+	},
 };
 };
 
 
-module.exports = internalUser;
+export default internalUser;
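Per the new doc comment on create(), the first user can be created unauthenticated, but only once and only while no active users exist; after that a valid token is required. A hedged example of the payload shape; the names, email and password below are placeholders:

import internalUser from "./internal/user.js";

const user = await internalUser.create(access, {
	name: "Jane Admin",
	nickname: "jane",
	email: "jane@example.com",
	roles: ["admin"],
	is_disabled: false,
	auth: { type: "password", secret: "a-strong-password" },
});
// the admin role yields a permissions row with visibility "all"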

+ 211 - 240
backend/lib/access.js

@@ -4,91 +4,90 @@
  * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
  * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
  * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
  * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
  * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
  * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
- *
- *
  */
  */
 
 
-const _              = require('lodash');
-const logger         = require('../logger').access;
-const Ajv            = require('ajv/dist/2020');
-const error          = require('./error');
-const userModel      = require('../models/user');
-const proxyHostModel = require('../models/proxy_host');
-const TokenModel     = require('../models/token');
-const roleSchema     = require('./access/roles.json');
-const permsSchema    = require('./access/permissions.json');
-
-module.exports = function (token_string) {
-	let Token                 = new TokenModel();
-	let token_data            = null;
-	let initialised           = false;
-	let object_cache          = {};
-	let allow_internal_access = false;
-	let user_roles            = [];
-	let permissions           = {};
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import { access as logger } from "../logger.js";
+import proxyHostModel from "../models/proxy_host.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
+import permsSchema from "./access/permissions.json" with { type: "json" };
+import roleSchema from "./access/roles.json" with { type: "json" };
+import errs from "./error.js";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+export default function (tokenString) {
+	const Token = TokenModel();
+	let tokenData = null;
+	let initialised = false;
+	const objectCache = {};
+	let allowInternalAccess = false;
+	let userRoles = [];
+	let permissions = {};
 
 
 	/**
 	/**
 	 * Loads the Token object from the token string
 	 * Loads the Token object from the token string
 	 *
 	 *
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	this.init = () => {
-		return new Promise((resolve, reject) => {
-			if (initialised) {
-				resolve();
-			} else if (!token_string) {
-				reject(new error.PermissionError('Permission Denied'));
+	this.init = async () => {
+		if (initialised) {
+			return;
+		}
+
+		if (!tokenString) {
+			throw new errs.PermissionError("Permission Denied");
+		}
+
+		tokenData = await Token.load(tokenString);
+
+		// At this point we need to load the user from the DB and make sure they:
+		// - exist (and not soft deleted)
+		// - still have the appropriate scopes for this token
+		// This is only required when the User ID is supplied or if the token scope has `user`
+		if (
+			tokenData.attrs.id ||
+			(typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, "user") !== -1)
+		) {
+			// Has token user id or token user scope
+			const user = await userModel
+				.query()
+				.where("id", tokenData.attrs.id)
+				.andWhere("is_deleted", 0)
+				.andWhere("is_disabled", 0)
+				.allowGraph("[permissions]")
+				.withGraphFetched("[permissions]")
+				.first();
+
+			if (user) {
+				// make sure user has all scopes of the token
+				// The `user` role is not added against the user row, so we have to just add it here to get past this check.
+				user.roles.push("user");
+
+				let ok = true;
+				_.forEach(tokenData.scope, (scope_item) => {
+					if (_.indexOf(user.roles, scope_item) === -1) {
+						ok = false;
+					}
+				});
+
+				if (!ok) {
+					throw new errs.AuthError("Invalid token scope for User");
+				}
+				initialised = true;
+				userRoles = user.roles;
+				permissions = user.permissions;
 			} else {
 			} else {
-				resolve(Token.load(token_string)
-					.then((data) => {
-						token_data = data;
-
-						// At this point we need to load the user from the DB and make sure they:
-						// - exist (and not soft deleted)
-						// - still have the appropriate scopes for this token
-						// This is only required when the User ID is supplied or if the token scope has `user`
-
-						if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
-							// Has token user id or token user scope
-							return userModel
-								.query()
-								.where('id', token_data.attrs.id)
-								.andWhere('is_deleted', 0)
-								.andWhere('is_disabled', 0)
-								.allowGraph('[permissions]')
-								.withGraphFetched('[permissions]')
-								.first()
-								.then((user) => {
-									if (user) {
-										// make sure user has all scopes of the token
-										// The `user` role is not added against the user row, so we have to just add it here to get past this check.
-										user.roles.push('user');
-
-										let is_ok = true;
-										_.forEach(token_data.scope, (scope_item) => {
-											if (_.indexOf(user.roles, scope_item) === -1) {
-												is_ok = false;
-											}
-										});
-
-										if (!is_ok) {
-											throw new error.AuthError('Invalid token scope for User');
-										} else {
-											initialised = true;
-											user_roles  = user.roles;
-											permissions = user.permissions;
-										}
-
-									} else {
-										throw new error.AuthError('User cannot be loaded for Token');
-									}
-								});
-						} else {
-							initialised = true;
-						}
-					}));
+				throw new errs.AuthError("User cannot be loaded for Token");
 			}
 			}
-		});
+		}
+		initialised = true;
 	};
 	};
 
 
 	/**
 	/**
@@ -96,140 +95,121 @@ module.exports = function (token_string) {
 	 * This only applies to USER token scopes, as all other tokens are not really bound
 	 * This only applies to USER token scopes, as all other tokens are not really bound
 	 * by object scopes
 	 * by object scopes
 	 *
 	 *
-	 * @param   {String} object_type
+	 * @param   {String} objectType
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	this.loadObjects = (object_type) => {
-		return new Promise((resolve, reject) => {
-			if (Token.hasScope('user')) {
-				if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
-					reject(new error.AuthError('User Token supplied without a User ID'));
-				} else {
-					let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
-					let query;
-
-					if (typeof object_cache[object_type] === 'undefined') {
-						switch (object_type) {
-
-						// USERS - should only return yourself
-						case 'users':
-							resolve(token_user_id ? [token_user_id] : []);
-							break;
-
-							// Proxy Hosts
-						case 'proxy_hosts':
-							query = proxyHostModel
-								.query()
-								.select('id')
-								.andWhere('is_deleted', 0);
-
-							if (permissions.visibility === 'user') {
-								query.andWhere('owner_user_id', token_user_id);
-							}
-
-							resolve(query
-								.then((rows) => {
-									let result = [];
-									_.forEach(rows, (rule_row) => {
-										result.push(rule_row.id);
-									});
-
-									// enum should not have less than 1 item
-									if (!result.length) {
-										result.push(0);
-									}
-
-									return result;
-								})
-							);
-							break;
-
-							// DEFAULT: null
-						default:
-							resolve(null);
-							break;
+	this.loadObjects = async (objectType) => {
+		let objects = null;
+
+		if (Token.hasScope("user")) {
+			if (typeof tokenData.attrs.id === "undefined" || !tokenData.attrs.id) {
+				throw new errs.AuthError("User Token supplied without a User ID");
+			}
+
+			const tokenUserId = tokenData.attrs.id ? tokenData.attrs.id : 0;
+
+			if (typeof objectCache[objectType] !== "undefined") {
+				objects = objectCache[objectType];
+			} else {
+				switch (objectType) {
+					// USERS - should only return yourself
+					case "users":
+						objects = tokenUserId ? [tokenUserId] : [];
+						break;
+
+					// Proxy Hosts
+					case "proxy_hosts": {
+						const query = proxyHostModel
+							.query()
+							.select("id")
+							.andWhere("is_deleted", 0);
+
+						if (permissions.visibility === "user") {
+							query.andWhere("owner_user_id", tokenUserId);
+						}
+
+						const rows = await query;
+						objects = [];
+						_.forEach(rows, (ruleRow) => {
+							objects.push(ruleRow.id);
+						});
+
+						// enum should not have less than 1 item
+						if (!objects.length) {
+							objects.push(0);
 						}
 						}
-					} else {
-						resolve(object_cache[object_type]);
+						break;
 					}
 					}
 				}
 				}
-			} else {
-				resolve(null);
+				objectCache[objectType] = objects;
 			}
 			}
-		})
-			.then((objects) => {
-				object_cache[object_type] = objects;
-				return objects;
-			});
+		}
+		return objects;
 	};
 	};
 
 
 	/**
 	/**
 	 * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
 	 * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
 	 *
 	 *
-	 * @param   {String} permission_label
+	 * @param   {String} permissionLabel
 	 * @returns {Object}
 	 * @returns {Object}
 	 */
 	 */
-	this.getObjectSchema = (permission_label) => {
-		let base_object_type = permission_label.split(':').shift();
+	this.getObjectSchema = async (permissionLabel) => {
+		const baseObjectType = permissionLabel.split(":").shift();
 
 
-		let schema = {
-			$id:                  'objects',
-			description:          'Actor Properties',
-			type:                 'object',
+		const schema = {
+			$id: "objects",
+			description: "Actor Properties",
+			type: "object",
 			additionalProperties: false,
 			additionalProperties: false,
-			properties:           {
+			properties: {
 				user_id: {
 				user_id: {
 					anyOf: [
 					anyOf: [
 						{
 						{
-							type: 'number',
-							enum: [Token.get('attrs').id]
-						}
-					]
+							type: "number",
+							enum: [Token.get("attrs").id],
+						},
+					],
 				},
 				},
 				scope: {
 				scope: {
-					type:    'string',
-					pattern: '^' + Token.get('scope') + '$'
-				}
-			}
+					type: "string",
+					pattern: `^${Token.get("scope")}$`,
+				},
+			},
 		};
 		};
 
 
-		return this.loadObjects(base_object_type)
-			.then((object_result) => {
-				if (typeof object_result === 'object' && object_result !== null) {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						enum:    object_result,
-						minimum: 1
-					};
-				} else {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						minimum: 1
-					};
-				}
+		const result = await this.loadObjects(baseObjectType);
+		if (typeof result === "object" && result !== null) {
+			schema.properties[baseObjectType] = {
+				type: "number",
+				enum: result,
+				minimum: 1,
+			};
+		} else {
+			schema.properties[baseObjectType] = {
+				type: "number",
+				minimum: 1,
+			};
+		}
 
 
-				return schema;
-			});
+		return schema;
 	};
 	};
 
 
-	return {
 
 
+	return {
 		token: Token,
 		token: Token,
 
 
 		/**
 		/**
 		 *
 		 *
-		 * @param   {Boolean}  [allow_internal]
+		 * @param   {Boolean}  [allowInternal]
 		 * @returns {Promise}
 		 * @returns {Promise}
 		 */
 		 */
-		load: (allow_internal) => {
-			return new Promise(function (resolve/*, reject*/) {
-				if (token_string) {
-					resolve(Token.load(token_string));
-				} else {
-					allow_internal_access = allow_internal;
-					resolve(allow_internal_access || null);
-				}
-			});
+		load: async (allowInternal) => {
+			if (tokenString) {
+				return await Token.load(tokenString);
+			}
+			allowInternalAccess = allowInternal;
+			return allowInternal || null;
 		},
 		},
 
 
 		reloadObjects: this.loadObjects,
 		reloadObjects: this.loadObjects,
@@ -240,68 +220,59 @@ module.exports = function (token_string) {
 		 * @param {*}       [data]
 		 * @param {*}       [data]
 		 * @returns {Promise}
 		 * @returns {Promise}
 		 */
 		 */
-		can: (permission, data) => {
-			if (allow_internal_access === true) {
-				return Promise.resolve(true);
-				//return true;
-			} else {
-				return this.init()
-					.then(() => {
-						// Initialised, token decoded ok
-						return this.getObjectSchema(permission)
-							.then((objectSchema) => {
-								const data_schema = {
-									[permission]: {
-										data:                         data,
-										scope:                        Token.get('scope'),
-										roles:                        user_roles,
-										permission_visibility:        permissions.visibility,
-										permission_proxy_hosts:       permissions.proxy_hosts,
-										permission_redirection_hosts: permissions.redirection_hosts,
-										permission_dead_hosts:        permissions.dead_hosts,
-										permission_streams:           permissions.streams,
-										permission_access_lists:      permissions.access_lists,
-										permission_certificates:      permissions.certificates
-									}
-								};
-
-								let permissionSchema = {
-									$async:               true,
-									$id:                  'permissions',
-									type:                 'object',
-									additionalProperties: false,
-									properties:           {}
-								};
-
-								permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
-
-								const ajv = new Ajv({
-									verbose:      true,
-									allErrors:    true,
-									breakOnError: true,
-									coerceTypes:  true,
-									schemas:      [
-										roleSchema,
-										permsSchema,
-										objectSchema,
-										permissionSchema
-									]
-								});
-
-								return ajv.validate('permissions', data_schema)
-									.then(() => {
-										return data_schema[permission];
-									});
-							});
-					})
-					.catch((err) => {
-						err.permission      = permission;
-						err.permission_data = data;
-						logger.error(permission, data, err.message);
-
-						throw new error.PermissionError('Permission Denied', err);
-					});
+		can: async (permission, data) => {
+			if (allowInternalAccess === true) {
+				return true;
 			}
 			}
-		}
+
+			try {
+				await this.init();
+				const objectSchema = await this.getObjectSchema(permission);
+
+				const dataSchema = {
+					[permission]: {
+						data: data,
+						scope: Token.get("scope"),
+						roles: userRoles,
+						permission_visibility: permissions.visibility,
+						permission_proxy_hosts: permissions.proxy_hosts,
+						permission_redirection_hosts: permissions.redirection_hosts,
+						permission_dead_hosts: permissions.dead_hosts,
+						permission_streams: permissions.streams,
+						permission_access_lists: permissions.access_lists,
+						permission_certificates: permissions.certificates,
+					},
+				};
+
+				const permissionSchema = {
+					$async: true,
+					$id: "permissions",
+					type: "object",
+					additionalProperties: false,
+					properties: {},
+				};
+
+				const rawData = fs.readFileSync(`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`, {
+					encoding: "utf8",
+				});
+				permissionSchema.properties[permission] = JSON.parse(rawData);
+
+				const ajv = new Ajv({
+					verbose: true,
+					allErrors: true,
+					breakOnError: true,
+					coerceTypes: true,
+					schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
+				});
+
+				const valid = await ajv.validate("permissions", dataSchema);
+				return valid && dataSchema[permission];
+			} catch (err) {
+				err.permission = permission;
+				err.permission_data = data;
+				logger.error(permission, data, err.message);
+				throw new errs.PermissionError("Permission Denied", err);
+			}
+		},
 	};
 	};
-};
+}
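A short usage sketch of the rewritten constructor, assuming a bearer token string has already been extracted and that a permission file such as access/proxy_hosts-list.json exists for the label used here:

import Access from "./lib/access.js";

const checkAccess = async (tokenString) => {
	const access = new Access(tokenString);
	await access.load();
	// Resolves with the validated permission payload, or throws errs.PermissionError
	return access.can("proxy_hosts:list");
};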

+ 72 - 71
backend/lib/certbot.js

@@ -1,85 +1,86 @@
-const dnsPlugins = require('../global/certbot-dns-plugins.json');
-const utils      = require('./utils');
-const error      = require('./error');
-const logger     = require('../logger').certbot;
-const batchflow  = require('batchflow');
+import batchflow from "batchflow";
+import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
+import { certbot as logger } from "../logger.js";
+import errs from "./error.js";
+import utils from "./utils.js";
 
 
-const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';
+const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
 
 
-const certbot = {
+/**
+ * Installs a certbot plugin given the key for the object from
+ * ../certbot/dns-plugins.json
+ *
+ * @param   {string}  pluginKey
+ * @returns {Object}
+ */
+const installPlugin = async (pluginKey) => {
+	if (typeof dnsPlugins[pluginKey] === "undefined") {
+		// throw Error(`Certbot plugin ${pluginKey} not found`);
+		throw new errs.ItemNotFoundError(pluginKey);
+	}
 
 
-	/**
-	 * @param {array} pluginKeys
-	 */
-	installPlugins: async (pluginKeys) => {
-		let hasErrors = false;
+	const plugin = dnsPlugins[pluginKey];
+	logger.start(`Installing ${pluginKey}...`);
 
 
-		return new Promise((resolve, reject) => {
-			if (pluginKeys.length === 0) {
-				resolve();
-				return;
-			}
+	plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
+	plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
 
 
-			batchflow(pluginKeys).sequential()
-				.each((_i, pluginKey, next) => {
-					certbot.installPlugin(pluginKey)
-						.then(() => {
-							next();
-						})
-						.catch((err) => {
-							hasErrors = true;
-							next(err);
-						});
-				})
-				.error((err) => {
-					logger.error(err.message);
-				})
-				.end(() => {
-					if (hasErrors) {
-						reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
-					} else {
-						resolve();
-					}
-				});
-		});
-	},
-
-	/**
-	 * Installs a cerbot plugin given the key for the object from
-	 * ../global/certbot-dns-plugins.json
-	 *
-	 * @param   {string}  pluginKey
-	 * @returns {Object}
-	 */
-	installPlugin: async (pluginKey) => {
-		if (typeof dnsPlugins[pluginKey] === 'undefined') {
-			// throw Error(`Certbot plugin ${pluginKey} not found`);
-			throw new error.ItemNotFoundError(pluginKey);
-		}
+	// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
+	// in new versions of Python
+	let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
+	if (typeof plugin.env === "object") {
+		env = Object.assign(env, plugin.env);
+	}
 
 
-		const plugin = dnsPlugins[pluginKey];
-		logger.start(`Installing ${pluginKey}...`);
+	const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
+	return utils
+		.exec(cmd, { env })
+		.then((result) => {
+			logger.complete(`Installed ${pluginKey}`);
+			return result;
+		})
+		.catch((err) => {
+			throw err;
+		});
+};
 
 
-		plugin.version      = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
-		plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
+/**
+ * @param {array} pluginKeys
+ */
+const installPlugins = async (pluginKeys) => {
+	let hasErrors = false;
 
 
-		// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
-		// in new versions of Python
-		let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'});
-		if (typeof plugin.env === 'object') {
-			env = Object.assign(env, plugin.env);
+	return new Promise((resolve, reject) => {
+		if (pluginKeys.length === 0) {
+			resolve();
+			return;
 		}
 		}
 
 
-		const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
-		return utils.exec(cmd, {env})
-			.then((result) => {
-				logger.complete(`Installed ${pluginKey}`);
-				return result;
+		batchflow(pluginKeys)
+			.sequential()
+			.each((_i, pluginKey, next) => {
+				installPlugin(pluginKey)
+					.then(() => {
+						next();
+					})
+					.catch((err) => {
+						hasErrors = true;
+						next(err);
+					});
+			})
+			.error((err) => {
+				logger.error(err.message);
 			})
 			})
-			.catch((err) => {
-				throw err;
+			.end(() => {
+				if (hasErrors) {
+					reject(
+						new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
+					);
+				} else {
+					resolve();
+				}
 			});
 			});
-	},
+	});
 };
 };
 
 
-module.exports = certbot;
+export { installPlugins, installPlugin };
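The new named exports would be used roughly like this; the plugin keys are illustrative and must exist in backend/certbot/dns-plugins.json:

import { installPlugin, installPlugins } from "./lib/certbot.js";

// Install a batch sequentially (rejects with errs.CommandError if any plugin fails)
await installPlugins(["cloudflare", "route53"]);

// Or install a single plugin
await installPlugin("cloudflare");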

+ 151 - 144
backend/lib/config.js

@@ -1,6 +1,6 @@
-const fs      = require('fs');
-const NodeRSA = require('node-rsa');
-const logger  = require('../logger').global;
+import fs from "node:fs";
+import NodeRSA from "node-rsa";
+import { global as logger } from "../logger.js";
 
 
 const keysFile         = '/data/keys.json';
 const keysFile         = '/data/keys.json';
 const mysqlEngine      = 'mysql2';
 const mysqlEngine      = 'mysql2';
@@ -12,18 +12,20 @@ let instance = null;
 // 1. Load from config file first (not recommended anymore)
 // 1. Load from config file first (not recommended anymore)
 // 2. Use config env variables next
 // 2. Use config env variables next
 const configure = () => {
 const configure = () => {
-	const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
+	const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
 	if (fs.existsSync(filename)) {
 	if (fs.existsSync(filename)) {
 		let configData;
 		let configData;
 		try {
 		try {
-			configData = require(filename);
+			// Load this json  synchronously
+			// Load this JSON file synchronously
+			configData = JSON.parse(rawData);
 		} catch (_) {
 		} catch (_) {
 			// do nothing
 			// do nothing
 		}
 		}
 
 
-		if (configData && configData.database) {
+		if (configData?.database) {
 			logger.info(`Using configuration from file: ${filename}`);
 			logger.info(`Using configuration from file: ${filename}`);
-			instance      = configData;
+			instance = configData;
 			instance.keys = getKeys();
 			instance.keys = getKeys();
 			return;
 			return;
 		}
 		}
@@ -34,15 +36,15 @@ const configure = () => {
 	const envMysqlName = process.env.DB_MYSQL_NAME || null;
 	const envMysqlName = process.env.DB_MYSQL_NAME || null;
 	if (envMysqlHost && envMysqlUser && envMysqlName) {
 	if (envMysqlHost && envMysqlUser && envMysqlName) {
 		// we have enough mysql creds to go with mysql
 		// we have enough mysql creds to go with mysql
-		logger.info('Using MySQL configuration');
+		logger.info("Using MySQL configuration");
 		instance = {
 		instance = {
 			database: {
 			database: {
-				engine:   mysqlEngine,
-				host:     envMysqlHost,
-				port:     process.env.DB_MYSQL_PORT || 3306,
-				user:     envMysqlUser,
+				engine: mysqlEngine,
+				host: envMysqlHost,
+				port: process.env.DB_MYSQL_PORT || 3306,
+				user: envMysqlUser,
 				password: process.env.DB_MYSQL_PASSWORD,
 				password: process.env.DB_MYSQL_PASSWORD,
-				name:     envMysqlName,
+				name: envMysqlName,
 			},
 			},
 			keys: getKeys(),
 			keys: getKeys(),
 		};
 		};
@@ -54,33 +56,33 @@ const configure = () => {
 	const envPostgresName = process.env.DB_POSTGRES_NAME || null;
 	const envPostgresName = process.env.DB_POSTGRES_NAME || null;
 	if (envPostgresHost && envPostgresUser && envPostgresName) {
 	if (envPostgresHost && envPostgresUser && envPostgresName) {
 		// we have enough postgres creds to go with postgres
 		// we have enough postgres creds to go with postgres
-		logger.info('Using Postgres configuration');
+		logger.info("Using Postgres configuration");
 		instance = {
 		instance = {
 			database: {
 			database: {
-				engine:   postgresEngine,
-				host:     envPostgresHost,
-				port:     process.env.DB_POSTGRES_PORT || 5432,
-				user:     envPostgresUser,
+				engine: postgresEngine,
+				host: envPostgresHost,
+				port: process.env.DB_POSTGRES_PORT || 5432,
+				user: envPostgresUser,
 				password: process.env.DB_POSTGRES_PASSWORD,
 				password: process.env.DB_POSTGRES_PASSWORD,
-				name:     envPostgresName,
+				name: envPostgresName,
 			},
 			},
 			keys: getKeys(),
 			keys: getKeys(),
 		};
 		};
 		return;
 		return;
 	}
 	}
 
 
-	const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
+	const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
 	logger.info(`Using Sqlite: ${envSqliteFile}`);
 	logger.info(`Using Sqlite: ${envSqliteFile}`);
 	instance = {
 	instance = {
 		database: {
 		database: {
-			engine: 'knex-native',
-			knex:   {
-				client:     sqliteClientName,
+			engine: "knex-native",
+			knex: {
+				client: sqliteClientName,
 				connection: {
 				connection: {
-					filename: envSqliteFile
+					filename: envSqliteFile,
 				},
 				},
-				useNullAsDefault: true
-			}
+				useNullAsDefault: true,
+			},
 		},
 		},
 		keys: getKeys(),
 		keys: getKeys(),
 	};
 	};
@@ -88,150 +90,155 @@ const configure = () => {
 
 
 const getKeys = () => {
 const getKeys = () => {
 	// Get keys from file
 	// Get keys from file
+	logger.debug("Checking for keys file:", keysFile);
 	if (!fs.existsSync(keysFile)) {
 	if (!fs.existsSync(keysFile)) {
 		generateKeys();
 		generateKeys();
 	} else if (process.env.DEBUG) {
 	} else if (process.env.DEBUG) {
-		logger.info('Keys file exists OK');
+		logger.info("Keys file exists OK");
 	}
 	}
 	try {
 	try {
-		return require(keysFile);
+		// Load the keys file synchronously and return the parsed JSON object
+		const rawData = fs.readFileSync(keysFile);
+		return JSON.parse(rawData);
 	} catch (err) {
 	} catch (err) {
-		logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
+		logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
 		process.exit(1);
 		process.exit(1);
 	}
 	}
 };
 };
 
 
 const generateKeys = () => {
 const generateKeys = () => {
-	logger.info('Creating a new JWT key pair...');
+	logger.info("Creating a new JWT key pair...");
 	// Now create the keys and save them in the config.
 	// Now create the keys and save them in the config.
 	const key = new NodeRSA({ b: 2048 });
 	const key = new NodeRSA({ b: 2048 });
 	key.generateKeyPair();
 	key.generateKeyPair();
 
 
 	const keys = {
 	const keys = {
-		key: key.exportKey('private').toString(),
-		pub: key.exportKey('public').toString(),
+		key: key.exportKey("private").toString(),
+		pub: key.exportKey("public").toString(),
 	};
 	};
 
 
 	// Write keys config
 	// Write keys config
 	try {
 	try {
 		fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
 		fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
 	} catch (err) {
 	} catch (err) {
-		logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
+		logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
 		process.exit(1);
 		process.exit(1);
 	}
 	}
-	logger.info('Wrote JWT key pair to config file: ' + keysFile);
+	logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
 };
 };
 
 
-module.exports = {
-
-	/**
-	 *
-	 * @param   {string}  key   ie: 'database' or 'database.engine'
-	 * @returns {boolean}
-	 */
-	has: function(key) {
-		instance === null && configure();
-		const keys = key.split('.');
-		let level  = instance;
-		let has    = true;
-		keys.forEach((keyItem) =>{
-			if (typeof level[keyItem] === 'undefined') {
-				has = false;
-			} else {
-				level = level[keyItem];
-			}
-		});
-
-		return has;
-	},
-
-	/**
-	 * Gets a specific key from the top level
-	 *
-	 * @param {string} key
-	 * @returns {*}
-	 */
-	get: function (key) {
-		instance === null && configure();
-		if (key && typeof instance[key] !== 'undefined') {
-			return instance[key];
-		}
-		return instance;
-	},
-
-	/**
-	 * Is this a sqlite configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isSqlite: function () {
-		instance === null && configure();
-		return instance.database.knex && instance.database.knex.client === sqliteClientName;
-	},
-
-	/**
-	 * Is this a mysql configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isMysql: function () {
-		instance === null && configure();
-		return instance.database.engine === mysqlEngine;
-	},
-	
-	/**
-		 * Is this a postgres configuration?
-		 *
-		 * @returns {boolean}
-		 */
-	isPostgres: function () {
-		instance === null && configure();
-		return instance.database.engine === postgresEngine;
-	},
-
-	/**
-	 * Are we running in debug mdoe?
-	 *
-	 * @returns {boolean}
-	 */
-	debug: function () {
-		return !!process.env.DEBUG;
-	},
-
-	/**
-	 * Returns a public key
-	 *
-	 * @returns {string}
-	 */
-	getPublicKey: function () {
-		instance === null && configure();
-		return instance.keys.pub;
-	},
-
-	/**
-	 * Returns a private key
-	 *
-	 * @returns {string}
-	 */
-	getPrivateKey: function () {
-		instance === null && configure();
-		return instance.keys.key;
-	},
-
-	/**
-	 * @returns {boolean}
-	 */
-	useLetsencryptStaging: function () {
-		return !!process.env.LE_STAGING;
-	},
-
-	/**
-	 * @returns {string|null}
-	 */
-	useLetsencryptServer: function () {
-		if (process.env.LE_SERVER) {
-			return process.env.LE_SERVER;
+/**
+ *
+ * @param   {string}  key   ie: 'database' or 'database.engine'
+ * @returns {boolean}
+ */
+const configHas = (key) => {
+	instance === null && configure();
+	const keys = key.split(".");
+	let level = instance;
+	let has = true;
+	keys.forEach((keyItem) => {
+		if (typeof level[keyItem] === "undefined") {
+			has = false;
+		} else {
+			level = level[keyItem];
 		}
 		}
-		return null;
+	});
+
+	return has;
+};
+
+/**
+ * Gets a specific key from the top level
+ *
+ * @param {string} key
+ * @returns {*}
+ */
+const configGet = (key) => {
+	instance === null && configure();
+	if (key && typeof instance[key] !== "undefined") {
+		return instance[key];
 	}
 	}
+	return instance;
 };
 };
+
+/**
+ * Is this a sqlite configuration?
+ *
+ * @returns {boolean}
+ */
+const isSqlite = () => {
+	instance === null && configure();
+	return instance.database.knex && instance.database.knex.client === sqliteClientName;
+};
+
+/**
+ * Is this a mysql configuration?
+ *
+ * @returns {boolean}
+ */
+const isMysql = () => {
+	instance === null && configure();
+	return instance.database.engine === mysqlEngine;
+};
+
+/**
+ * Is this a postgres configuration?
+ *
+ * @returns {boolean}
+ */
+const isPostgres = () => {
+	instance === null && configure();
+	return instance.database.engine === postgresEngine;
+};
+
+/**
+ * Are we running in debug mode?
+ *
+ * @returns {boolean}
+ */
+const isDebugMode = () => !!process.env.DEBUG;
+
+/**
+ * Are we running in CI?
+ *
+ * @returns {boolean}
+ */
+const isCI = () => process.env.CI === "true" && process.env.DEBUG === "true";
+
+/**
+ * Returns a public key
+ *
+ * @returns {string}
+ */
+const getPublicKey = () => {
+	instance === null && configure();
+	return instance.keys.pub;
+};
+
+/**
+ * Returns a private key
+ *
+ * @returns {string}
+ */
+const getPrivateKey = () => {
+	instance === null && configure();
+	return instance.keys.key;
+};
+
+/**
+ * @returns {boolean}
+ */
+const useLetsencryptStaging = () => !!process.env.LE_STAGING;
+
+/**
+ * @returns {string|null}
+ */
+const useLetsencryptServer = () => {
+	if (process.env.LE_SERVER) {
+		return process.env.LE_SERVER;
+	}
+	return null;
+};
+
+export { isCI, configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
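A quick sketch of consuming the new named exports in place of the old module object; the branch bodies are placeholders:

import { configGet, isSqlite, isDebugMode, getPublicKey } from "./lib/config.js";

const dbConfig = configGet("database");
if (isSqlite()) {
	// sqlite-specific handling
}
if (isDebugMode()) {
	// extra logging
}
const pub = getPublicKey();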

+ 52 - 48
backend/lib/error.js

@@ -1,99 +1,103 @@
-const _    = require('lodash');
-const util = require('util');
+import _ from "lodash";
 
 
-module.exports = {
-
-	PermissionError: function (message, previous) {
+const errs = {
+	PermissionError: function (_, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = 'Permission Denied';
-		this.public   = true;
-		this.status   = 403;
+		this.message = "Permission Denied";
+		this.public = true;
+		this.status = 403;
 	},
 	},
 
 
 	ItemNotFoundError: function (id, previous) {
 	ItemNotFoundError: function (id, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = 'Item Not Found - ' + id;
-		this.public   = true;
-		this.status   = 404;
+		this.message = "Not Found";
+		if (id) {
+			this.message = `Not Found - ${id}`;
+		}
+		this.public = true;
+		this.status = 404;
 	},
 	},
 
 
-	AuthError: function (message, previous) {
+	AuthError: function (message, messageI18n, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 401;
+		this.message = message;
+		this.message_i18n = messageI18n;
+		this.public = true;
+		this.status = 400;
 	},
 	},
 
 
 	InternalError: function (message, previous) {
 	InternalError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 500;
-		this.public   = false;
+		this.message = message;
+		this.status = 500;
+		this.public = false;
 	},
 	},
 
 
 	InternalValidationError: function (message, previous) {
 	InternalValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = false;
+		this.message = message;
+		this.status = 400;
+		this.public = false;
 	},
 	},
 
 
 	ConfigurationError: function (message, previous) {
 	ConfigurationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = true;
+		this.message = message;
+		this.status = 400;
+		this.public = true;
 	},
 	},
 
 
 	CacheError: function (message, previous) {
 	CacheError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
-		this.message  = message;
+		this.name = this.constructor.name;
+		this.message = message;
 		this.previous = previous;
 		this.previous = previous;
-		this.status   = 500;
-		this.public   = false;
+		this.status = 500;
+		this.public = false;
 	},
 	},
 
 
 	ValidationError: function (message, previous) {
 	ValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 400;
+		this.message = message;
+		this.public = true;
+		this.status = 400;
 	},
 	},
 
 
 	AssertionFailedError: function (message, previous) {
 	AssertionFailedError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = false;
-		this.status   = 400;
+		this.message = message;
+		this.public = false;
+		this.status = 400;
 	},
 	},
 
 
 	CommandError: function (stdErr, code, previous) {
 	CommandError: function (stdErr, code, previous) {
 		Error.captureStackTrace(this, this.constructor);
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
 		this.previous = previous;
-		this.message  = stdErr;
-		this.code     = code;
-		this.public   = false;
+		this.message = stdErr;
+		this.code = code;
+		this.public = false;
 	},
 	},
 };
 };
 
 
-_.forEach(module.exports, function (error) {
-	util.inherits(error, Error);
+_.forEach(errs, (err) => {
+	err.prototype = Object.create(Error.prototype);
 });
 });
+
+export default errs;
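Since these are still constructor functions, they must be invoked with new; a small sketch, where someModel stands in for any Objection model:

import errs from "./lib/error.js";

const getRow = async (id) => {
	const row = await someModel.query().findById(id); // someModel is a placeholder
	if (!row) {
		throw new errs.ItemNotFoundError(id);
	}
	return row;
};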

+ 8 - 7
backend/lib/express/cors.js

@@ -1,12 +1,13 @@
-module.exports = function (req, res, next) {
+export default (req, res, next) => {
 	if (req.headers.origin) {
 	if (req.headers.origin) {
 		res.set({
 		res.set({
-			'Access-Control-Allow-Origin':      req.headers.origin,
-			'Access-Control-Allow-Credentials': true,
-			'Access-Control-Allow-Methods':     'OPTIONS, GET, POST',
-			'Access-Control-Allow-Headers':     'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
-			'Access-Control-Max-Age':           5 * 60,
-			'Access-Control-Expose-Headers':    'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
+			"Access-Control-Allow-Origin": req.headers.origin,
+			"Access-Control-Allow-Credentials": true,
+			"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
+			"Access-Control-Allow-Headers":
+				"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
+			"Access-Control-Max-Age": 5 * 60,
+			"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
 		});
 		});
 		next();
 		next();
 	} else {
 	} else {

+ 12 - 12
backend/lib/express/jwt-decode.js

@@ -1,15 +1,15 @@
-const Access = require('../access');
+import Access from "../access.js";
 
 
-module.exports = () => {
-	return function (req, res, next) {
-		res.locals.access = null;
-		let access        = new Access(res.locals.token || null);
-		access.load()
-			.then(() => {
-				res.locals.access = access;
-				next();
-			})
-			.catch(next);
+export default () => {
+	return async (_, res, next) => {
+		try {
+			res.locals.access = null;
+			const access = new Access(res.locals.token || null);
+			await access.load();
+			res.locals.access = access;
+			next();
+		} catch (err) {
+			next(err);
+		}
 	};
 	};
 };
 };
-

+ 5 - 5
backend/lib/express/jwt.js

@@ -1,13 +1,13 @@
-module.exports = function () {
-	return function (req, res, next) {
+export default function () {
+	return (req, res, next) => {
 		if (req.headers.authorization) {
 		if (req.headers.authorization) {
-			let parts = req.headers.authorization.split(' ');
+			const parts = req.headers.authorization.split(" ");
 
 
-			if (parts && parts[0] === 'Bearer' && parts[1]) {
+			if (parts && parts[0] === "Bearer" && parts[1]) {
 				res.locals.token = parts[1];
 				res.locals.token = parts[1];
 			}
 			}
 		}
 		}
 
 
 		next();
 		next();
 	};
 	};
-};
+}
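A sketch of how this middleware and the jwt-decode middleware above would be mounted together, assuming the usual ordering in the app setup:

import express from "express";
import jwt from "./lib/express/jwt.js";
import jwtDecode from "./lib/express/jwt-decode.js";

const app = express();
app.use(jwt());       // copies the Bearer token into res.locals.token
app.use(jwtDecode()); // builds res.locals.access from that token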

+ 16 - 16
backend/lib/express/pagination.js

@@ -1,7 +1,6 @@
-let _ = require('lodash');
-
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
+import _ from "lodash";
 
 
+export default (default_sort, default_offset, default_limit, max_limit) => {
 	/**
 	/**
 	 * This will setup the req query params with filtered data and defaults
 	 * This will setup the req query params with filtered data and defaults
 	 *
 	 *
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
 	 *
 	 *
 	 */
 	 */
 
 
-	return function (req, res, next) {
-
-		req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
-		req.query.limit  = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
+	return (req, _res, next) => {
+		req.query.offset =
+			typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
+		req.query.limit =
+			typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
 
 
 		if (max_limit && req.query.limit > max_limit) {
 		if (max_limit && req.query.limit > max_limit) {
 			req.query.limit = max_limit;
 			req.query.limit = max_limit;
 		}
 		}
 
 
 		// Sorting
 		// Sorting
-		let sort       = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
-		let myRegexp   = /.*\.(asc|desc)$/ig;
-		let sort_array = [];
+		let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
+		const myRegexp = /.*\.(asc|desc)$/gi;
+		const sort_array = [];
 
 
-		sort = sort.split(',');
-		_.map(sort, function (val) {
-			let matches = myRegexp.exec(val);
+		sort = sort.split(",");
+		_.map(sort, (val) => {
+			const matches = myRegexp.exec(val);
 
 
 			if (matches !== null) {
 			if (matches !== null) {
-				let dir = matches[1];
+				const dir = matches[1];
 				sort_array.push({
 				sort_array.push({
 					field: val.substr(0, val.length - (dir.length + 1)),
 					field: val.substr(0, val.length - (dir.length + 1)),
-					dir:   dir.toLowerCase()
+					dir: dir.toLowerCase(),
 				});
 				});
 			} else {
 			} else {
 				sort_array.push({
 				sort_array.push({
 					field: val,
 					field: val,
-					dir:   'asc'
+					dir: "asc",
 				});
 				});
 			}
 			}
 		});
 		});

+ 2 - 3
backend/lib/express/user-id-from-me.js

@@ -1,9 +1,8 @@
-module.exports = (req, res, next) => {
+export default (req, res, next) => {
 	if (req.params.user_id === 'me' && res.locals.access) {
 	if (req.params.user_id === 'me' && res.locals.access) {
 		req.params.user_id = res.locals.access.token.get('attrs').id;
 		req.params.user_id = res.locals.access.token.get('attrs').id;
 	} else {
 	} else {
-		req.params.user_id = parseInt(req.params.user_id, 10);
+		req.params.user_id = Number.parseInt(req.params.user_id, 10);
 	}
 	}
-
 	next();
 	next();
 };
 };

+ 52 - 56
backend/lib/helpers.js

@@ -1,62 +1,58 @@
-const moment       = require('moment');
-const {isPostgres} = require('./config');
-const {ref}        = require('objection');
+import moment from "moment";
+import { ref } from "objection";
+import { isPostgres } from "./config.js";
 
 
-module.exports = {
-
-	/**
-	 * Takes an expression such as 30d and returns a moment object of that date in future
-	 *
-	 * Key      Shorthand
-	 * ==================
-	 * years         y
-	 * quarters      Q
-	 * months        M
-	 * weeks         w
-	 * days          d
-	 * hours         h
-	 * minutes       m
-	 * seconds       s
-	 * milliseconds  ms
-	 *
-	 * @param {String}  expression
-	 * @returns {Object}
-	 */
-	parseDatePeriod: function (expression) {
-		let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
-		if (matches) {
-			return moment().add(matches[1], matches[2]);
-		}
+/**
+ * Takes an expression such as 30d and returns a moment object of that date in future
+ *
+ * Key      Shorthand
+ * ==================
+ * years         y
+ * quarters      Q
+ * months        M
+ * weeks         w
+ * days          d
+ * hours         h
+ * minutes       m
+ * seconds       s
+ * milliseconds  ms
+ *
+ * @param {String}  expression
+ * @returns {Object}
+ */
+const parseDatePeriod = (expression) => {
+	const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
+	if (matches) {
+		return moment().add(matches[1], matches[2]);
+	}
 
 
-		return null;
-	},
+	return null;
+};
 
 
-	convertIntFieldsToBool: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] === 1;
-			}
-		});
-		return obj;
-	},
+const convertIntFieldsToBool = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] === 1;
+		}
+	});
+	return obj;
+};
 
 
-	convertBoolFieldsToInt: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] ? 1 : 0;
-			}
-		});
-		return obj;
-	},
+const convertBoolFieldsToInt = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] ? 1 : 0;
+		}
+	});
+	return obj;
+};
 
 
-	/**
-	 * Casts a column to json if using postgres
-	 *
-	 * @param {string} colName
-	 * @returns {string|Objection.ReferenceBuilder}
-	 */
-	castJsonIfNeed: function (colName) {
-		return isPostgres() ? ref(colName).castText() : colName;
-	}
+/**
+ * Casts a column to json if using postgres
+ *
+ * @param {string} colName
+ * @returns {string|Objection.ReferenceBuilder}
+ */
+const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
 
 
-};
+export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
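The named exports replace the old module object; for example:

import { parseDatePeriod, convertIntFieldsToBool } from "./lib/helpers.js";

const expiry = parseDatePeriod("30d"); // moment object 30 days from now, or null for an invalid expression
const row = convertIntFieldsToBool({ enabled: 1, name: "example" }, ["enabled"]);
// row.enabled === true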

+ 25 - 21
backend/lib/migrate_template.js

@@ -1,33 +1,34 @@
-const migrate_name = 'identifier_for_migrate';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "identifier_for_migrate";
 
 
 /**
 /**
  * Migrate
  * Migrate
  *
  *
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex, Promise) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
 	// Create Table example:
 	// Create Table example:
 
 
-	/*return knex.schema.createTable('notification', (table) => {
+	/*
+	return knex.schema.createTable('notification', (table) => {
 		 table.increments().primary();
 		 table.increments().primary();
 		 table.string('name').notNull();
 		 table.string('name').notNull();
 		 table.string('type').notNull();
 		 table.string('type').notNull();
 		 table.integer('created_on').notNull();
 		 table.integer('created_on').notNull();
 		 table.integer('modified_on').notNull();
 		 table.integer('modified_on').notNull();
 	 })
 	 })
-	 .then(function () {
-		logger.info('[' + migrate_name + '] Notification Table created');
-	 });*/
+		.then(function () {
+			logger.info('[' + migrateName + '] Notification Table created');
+		});
+	 */
 
 
-	logger.info('[' + migrate_name + '] Migrating Up Complete');
+	logger.info(`[${migrateName}] Migrating Up Complete`);
 
 
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
@@ -35,21 +36,24 @@ exports.up = function (knex, Promise) {
 /**
 /**
  * Undo Migrate
  * Undo Migrate
  *
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
 	// Drop table example:
 	// Drop table example:
 
 
-	/*return knex.schema.dropTable('notification')
-	 .then(() => {
-		logger.info('[' + migrate_name + '] Notification Table dropped');
-	 });*/
+	/*
+	return knex.schema.dropTable('notification')
+		.then(() => {
+			logger.info(`[${migrateName}] Notification Table dropped`);
+		});
+	*/
 
 
-	logger.info('[' + migrate_name + '] Migrating Down Complete');
+	logger.info(`[${migrateName}] Migrating Down Complete`);
 
 
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };
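Filling in the template's own commented-out notification example, a concrete migration under the new export shape would look roughly like this:

import { migrate as logger } from "../logger.js";

const migrateName = "notification";

const up = (knex) => {
	logger.info(`[${migrateName}] Migrating Up...`);
	return knex.schema.createTable("notification", (table) => {
		table.increments().primary();
		table.string("name").notNull();
		table.string("type").notNull();
		table.integer("created_on").notNull();
		table.integer("modified_on").notNull();
	});
};

const down = (knex) => {
	logger.info(`[${migrateName}] Migrating Down...`);
	return knex.schema.dropTable("notification");
};

export { up, down };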

+ 92 - 92
backend/lib/utils.js

@@ -1,110 +1,110 @@
-const _          = require('lodash');
-const exec       = require('node:child_process').exec;
-const execFile   = require('node:child_process').execFile;
-const { Liquid } = require('liquidjs');
-const logger     = require('../logger').global;
-const error      = require('./error');
+import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import { Liquid } from "liquidjs";
+import _ from "lodash";
+import { global as logger } from "../logger.js";
+import errs from "./error.js";
 
 
-module.exports = {
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
 
-	exec: async (cmd, options = {}) => {
-		logger.debug('CMD:', cmd);
-
-		const { stdout, stderr } = await new Promise((resolve, reject) => {
-			const child = exec(cmd, options, (isError, stdout, stderr) => {
-				if (isError) {
-					reject(new error.CommandError(stderr, isError));
-				} else {
-					resolve({ stdout, stderr });
-				}
-			});
+const exec = async (cmd, options = {}) => {
+	logger.debug("CMD:", cmd);
+	const { stdout, stderr } = await new Promise((resolve, reject) => {
+		const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
+			if (isError) {
+				reject(new errs.CommandError(stderr, isError));
+			} else {
+				resolve({ stdout, stderr });
+			}
+		});
 
 
-			child.on('error', (e) => {
-				reject(new error.CommandError(stderr, 1, e));
-			});
+		child.on("error", (e) => {
+			reject(new errs.CommandError(stderr, 1, e));
 		});
 		});
-		return stdout;
-	},
+	});
+	return stdout;
+};
 
 
-	/**
-	 * @param   {String} cmd
-	 * @param   {Array}  args
-	 * @param   {Object|undefined}  options
-	 * @returns {Promise}
-	 */
-	execFile: (cmd, args, options) => {
-		logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`);
-		if (typeof options === 'undefined') {
-			options = {};
-		}
+/**
+ * @param   {String} cmd
+ * @param   {Array}  args
+ * @param   {Object|undefined}  options
+ * @returns {Promise}
+ */
+const execFile = (cmd, args, options) => {
+	logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
+	const opts = options || {};
 
 
-		return new Promise((resolve, reject) => {
-			execFile(cmd, args, options, (err, stdout, stderr) => {
-				if (err && typeof err === 'object') {
-					reject(new error.CommandError(stderr, 1, err));
-				} else {
-					resolve(stdout.trim());
-				}
-			});
+	return new Promise((resolve, reject) => {
+		nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
+			if (err && typeof err === "object") {
+				reject(new errs.CommandError(stderr, 1, err));
+			} else {
+				resolve(stdout.trim());
+			}
 		});
 		});
-	},
+	});
+};
 
 
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRow = (omissions) => {
 	/**
 	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
+	 * @param   {Object} row
+	 * @returns {Object}
 	 */
 	 */
-	omitRow: (omissions) => {
-		/**
-		 * @param   {Object} row
-		 * @returns {Object}
-		 */
-		return (row) => {
-			return _.omit(row, omissions);
-		};
-	},
+	return (row) => {
+		return _.omit(row, omissions);
+	};
+};
 
 
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRows = (omissions) => {
 	/**
 	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
+	 * @param   {Array} rows
+	 * @returns {Object}
 	 */
 	 */
-	omitRows: (omissions) => {
-		/**
-		 * @param   {Array} rows
-		 * @returns {Object}
-		 */
-		return (rows) => {
-			rows.forEach((row, idx) => {
-				rows[idx] = _.omit(row, omissions);
-			});
-			return rows;
-		};
-	},
+	return (rows) => {
+		rows.forEach((row, idx) => {
+			rows[idx] = _.omit(row, omissions);
+		});
+		return rows;
+	};
+};
+
+/**
+ * @returns {Object} Liquid render engine
+ */
+const getRenderEngine = () => {
+	const renderEngine = new Liquid({
+		root: `${__dirname}/../templates/`,
+	});
 
 
 	/**
 	/**
-	 * @returns {Object} Liquid render engine
+	 * nginxAccessRule expects the object given to have 2 properties:
+	 *
+	 * directive  string
+	 * address    string
 	 */
 	 */
-	getRenderEngine: () => {
-		const renderEngine = new Liquid({
-			root: `${__dirname}/../templates/`
-		});
-
-		/**
-		 * nginxAccessRule expects the object given to have 2 properties:
-		 *
-		 * directive  string
-		 * address    string
-		 */
-		renderEngine.registerFilter('nginxAccessRule', (v) => {
-			if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
-				return `${v.directive} ${v.address};`;
-			}
-			return '';
-		});
+	renderEngine.registerFilter("nginxAccessRule", (v) => {
+		if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
+			return `${v.directive} ${v.address};`;
+		}
+		return "";
+	});
 
 
-		return renderEngine;
-	}
+	return renderEngine;
 };
 };
+
+export default { exec, execFile, omitRow, omitRows, getRenderEngine };
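A brief sketch of the default export in use; the shell commands are placeholders:

import utils from "./lib/utils.js";

const stdout = await utils.exec("echo hello");           // rejects with errs.CommandError on failure
const trimmed = await utils.execFile("echo", ["hello"]); // resolves with trimmed stdout
const engine = utils.getRenderEngine();                  // Liquid instance rooted at backend/templates/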

+ 35 - 33
backend/lib/validator/api.js

@@ -1,12 +1,12 @@
-const Ajv   = require('ajv/dist/2020');
-const error = require('../error');
+import Ajv from "ajv/dist/2020.js";
+import errs from "../error.js";
 
 
 const ajv = new Ajv({
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
 	allowUnionTypes: true,
-	strict:          false,
-	coerceTypes:     true,
+	strict: false,
+	coerceTypes: true,
 });
 });
 
 
 /**
 /**
@@ -14,30 +14,32 @@ const ajv = new Ajv({
  * @param {Object} payload
  * @param {Object} payload
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-function apiValidator (schema, payload/*, description*/) {
-	return new Promise(function Promise_apiValidator (resolve, reject) {
-		if (schema === null) {
-			reject(new error.ValidationError('Schema is undefined'));
-			return;
-		}
-
-		if (typeof payload === 'undefined') {
-			reject(new error.ValidationError('Payload is undefined'));
-			return;
-		}
-
-		const validate = ajv.compile(schema);
-		const valid    = validate(payload);
-
-		if (valid && !validate.errors) {
-			resolve(payload);
-		} else {
-			let message = ajv.errorsText(validate.errors);
-			let err     = new error.ValidationError(message);
-			err.debug   = [validate.errors, payload];
-			reject(err);
-		}
-	});
-}
-
-module.exports = apiValidator;
+const apiValidator = async (schema, payload /*, description*/) => {
+	if (!schema) {
+		throw new errs.ValidationError("Schema is undefined");
+	}
+
+	// Can't use falsy check here as valid payload could be `0` or `false`
+	if (typeof payload === "undefined") {
+		throw new errs.ValidationError("Payload is undefined");
+	}
+
+	const validate = ajv.compile(schema);
+	const valid = validate(payload);
+
+	if (valid && !validate.errors) {
+		return payload;
+	}
+
+	const message = ajv.errorsText(validate.errors);
+	const err = new errs.ValidationError(message);
+	err.debug = { validationErrors: validate.errors, payload };
+	throw err;
+};
+
+export default apiValidator;
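A sketch of validating a request body with the rewritten async validator; the schema and the payload variable are placeholders:

import apiValidator from "./lib/validator/api.js";

const schema = {
	type: "object",
	required: ["name"],
	additionalProperties: false,
	properties: { name: { type: "string", minLength: 1 } },
};

// Throws errs.ValidationError (with a .debug property) on failure, otherwise returns the payload
const clean = await apiValidator(schema, payload);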

+ 18 - 18
backend/lib/validator/index.js

@@ -1,17 +1,17 @@
-const _                 = require('lodash');
-const Ajv               = require('ajv/dist/2020');
-const error             = require('../error');
-const commonDefinitions = require('../../schema/common.json');
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import commonDefinitions from "../../schema/common.json" with { type: "json" };
+import errs from "../error.js";
 
 
 RegExp.prototype.toJSON = RegExp.prototype.toString;
 RegExp.prototype.toJSON = RegExp.prototype.toString;
 
 
 const ajv = new Ajv({
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
 	allowUnionTypes: true,
-	coerceTypes:     true,
-	strict:          false,
-	schemas:         [commonDefinitions]
+	coerceTypes: true,
+	strict: false,
+	schemas: [commonDefinitions],
 });
 });
 
 
 /**
 /**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
  * @param   {Object} payload
  * @param   {Object} payload
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-function validator (schema, payload) {
-	return new Promise(function (resolve, reject) {
+const validator = (schema, payload) => {
+	return new Promise((resolve, reject) => {
 		if (!payload) {
 		if (!payload) {
-			reject(new error.InternalValidationError('Payload is falsy'));
+			reject(new errs.InternalValidationError("Payload is falsy"));
 		} else {
 		} else {
 			try {
 			try {
-				let validate = ajv.compile(schema);
-				let valid    = validate(payload);
+				const validate = ajv.compile(schema);
+				const valid = validate(payload);
 
 
 				if (valid && !validate.errors) {
 				if (valid && !validate.errors) {
 					resolve(_.cloneDeep(payload));
 					resolve(_.cloneDeep(payload));
 				} else {
 				} else {
-					let message = ajv.errorsText(validate.errors);
-					reject(new error.InternalValidationError(message));
+					const message = ajv.errorsText(validate.errors);
+					reject(new errs.InternalValidationError(message));
 				}
 				}
 			} catch (err) {
 			} catch (err) {
 				reject(err);
 				reject(err);
 			}
 			}
 		}
 		}
 	});
 	});
-}
+};
 
 
-module.exports = validator;
+export default validator;
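Note: the internal validator keeps its Promise shape but still resolves with _.cloneDeep(payload), so callers get a detached copy. A small sketch; the inline schema is illustrative, and references into the registered common.json definitions resolve the same way.

import validator from "./lib/validator/index.js";

const check = async (payload) => {
	// Rejects with InternalValidationError when the payload does not match the schema
	const clean = await validator(
		{ type: "object", required: ["id"], properties: { id: { type: "integer" } } },
		payload,
	);
	// `clean` is a deep clone; mutating it never touches the original payload
	return clean;
};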

+ 16 - 12
backend/logger.js

@@ -1,14 +1,18 @@
-const {Signale} = require('signale');
+import signale from "signale";
 
 
-module.exports = {
-	global:    new Signale({scope: 'Global   '}),
-	migrate:   new Signale({scope: 'Migrate  '}),
-	express:   new Signale({scope: 'Express  '}),
-	access:    new Signale({scope: 'Access   '}),
-	nginx:     new Signale({scope: 'Nginx    '}),
-	ssl:       new Signale({scope: 'SSL      '}),
-	certbot:   new Signale({scope: 'Certbot  '}),
-	import:    new Signale({scope: 'Importer '}),
-	setup:     new Signale({scope: 'Setup    '}),
-	ip_ranges: new Signale({scope: 'IP Ranges'})
+const opts = {
+	logLevel: "info",
 };
 };
+
+const global = new signale.Signale({ scope: "Global   ", ...opts });
+const migrate = new signale.Signale({ scope: "Migrate  ", ...opts });
+const express = new signale.Signale({ scope: "Express  ", ...opts });
+const access = new signale.Signale({ scope: "Access   ", ...opts });
+const nginx = new signale.Signale({ scope: "Nginx    ", ...opts });
+const ssl = new signale.Signale({ scope: "SSL      ", ...opts });
+const certbot = new signale.Signale({ scope: "Certbot  ", ...opts });
+const importer = new signale.Signale({ scope: "Importer ", ...opts });
+const setup = new signale.Signale({ scope: "Setup    ", ...opts });
+const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
+
+export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
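Note: each scope is now a named export, so call sites import only what they need; the old `import` and `ip_ranges` keys become `importer` and `ipRanges` because the exports are now plain identifiers and `import` is a reserved word. A small usage sketch:

import { nginx as logger, ipRanges } from "./logger.js";

logger.info("Reloading nginx configuration");
ipRanges.warn("Could not fetch IP ranges, keeping the previous list");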

+ 11 - 13
backend/migrate.js

@@ -1,15 +1,13 @@
-const db     = require('./db');
-const logger = require('./logger').migrate;
+import db from "./db.js";
+import { migrate as logger } from "./logger.js";
 
 
-module.exports = {
-	latest: function () {
-		return db.migrate.currentVersion()
-			.then((version) => {
-				logger.info('Current database version:', version);
-				return db.migrate.latest({
-					tableName: 'migrations',
-					directory: 'migrations'
-				});
-			});
-	}
+const migrateUp = async () => {
+	const version = await db.migrate.currentVersion();
+	logger.info("Current database version:", version);
+	return await db.migrate.latest({
+		tableName: "migrations",
+		directory: "migrations",
+	});
 };
 };
+
+export { migrateUp };
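Note: the migration runner is now a named async export rather than an object with a `latest` method. A sketch of how a startup path might await it; the wrapper below is illustrative, the real wiring lives in index.js.

import { migrateUp } from "./migrate.js";
import { global as logger } from "./logger.js";

const start = async () => {
	await migrateUp();
	logger.info("Database migrations are up to date");
	// ...then start the express app
};

start().catch((err) => {
	logger.error(err.message);
	process.exit(1);
});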

+ 134 - 133
backend/migrations/20180618015850_initial.js

@@ -1,5 +1,6 @@
-const migrate_name = 'initial-schema';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "initial-schema";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,199 +8,199 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.createTable('auth', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('user_id').notNull().unsigned();
-		table.string('type', 30).notNull();
-		table.string('secret').notNull();
-		table.json('meta').notNull();
-		table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.createTable("auth", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("user_id").notNull().unsigned();
+			table.string("type", 30).notNull();
+			table.string("secret").notNull();
+			table.json("meta").notNull();
+			table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] auth Table created');
+			logger.info(`[${migrateName}] auth Table created`);
 
 
-			return knex.schema.createTable('user', (table) => {
+			return knex.schema.createTable("user", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('is_disabled').notNull().unsigned().defaultTo(0);
-				table.string('email').notNull();
-				table.string('name').notNull();
-				table.string('nickname').notNull();
-				table.string('avatar').notNull();
-				table.json('roles').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("is_disabled").notNull().unsigned().defaultTo(0);
+				table.string("email").notNull();
+				table.string("name").notNull();
+				table.string("nickname").notNull();
+				table.string("avatar").notNull();
+				table.json("roles").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] user Table created');
+			logger.info(`[${migrateName}] user Table created`);
 
 
-			return knex.schema.createTable('user_permission', (table) => {
+			return knex.schema.createTable("user_permission", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('visibility').notNull();
-				table.string('proxy_hosts').notNull();
-				table.string('redirection_hosts').notNull();
-				table.string('dead_hosts').notNull();
-				table.string('streams').notNull();
-				table.string('access_lists').notNull();
-				table.string('certificates').notNull();
-				table.unique('user_id');
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("visibility").notNull();
+				table.string("proxy_hosts").notNull();
+				table.string("redirection_hosts").notNull();
+				table.string("dead_hosts").notNull();
+				table.string("streams").notNull();
+				table.string("access_lists").notNull();
+				table.string("certificates").notNull();
+				table.unique("user_id");
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] user_permission Table created');
+			logger.info(`[${migrateName}] user_permission Table created`);
 
 
-			return knex.schema.createTable('proxy_host', (table) => {
+			return knex.schema.createTable("proxy_host", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_ip').notNull();
-				table.integer('forward_port').notNull().unsigned();
-				table.integer('access_list_id').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_ip").notNull();
+				table.integer("forward_port").notNull().unsigned();
+				table.integer("access_list_id").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table created');
+			logger.info(`[${migrateName}] proxy_host Table created`);
 
 
-			return knex.schema.createTable('redirection_host', (table) => {
+			return knex.schema.createTable("redirection_host", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_domain_name').notNull();
-				table.integer('preserve_path').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_domain_name").notNull();
+				table.integer("preserve_path").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table created');
+			logger.info(`[${migrateName}] redirection_host Table created`);
 
 
-			return knex.schema.createTable('dead_host', (table) => {
+			return knex.schema.createTable("dead_host", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table created');
+			logger.info(`[${migrateName}] dead_host Table created`);
 
 
-			return knex.schema.createTable('stream', (table) => {
+			return knex.schema.createTable("stream", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('incoming_port').notNull().unsigned();
-				table.string('forward_ip').notNull();
-				table.integer('forwarding_port').notNull().unsigned();
-				table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
-				table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("incoming_port").notNull().unsigned();
+				table.string("forward_ip").notNull();
+				table.integer("forwarding_port").notNull().unsigned();
+				table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
+				table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table created');
+			logger.info(`[${migrateName}] stream Table created`);
 
 
-			return knex.schema.createTable('access_list', (table) => {
+			return knex.schema.createTable("access_list", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('name').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("name").notNull();
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table created');
+			logger.info(`[${migrateName}] access_list Table created`);
 
 
-			return knex.schema.createTable('certificate', (table) => {
+			return knex.schema.createTable("certificate", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('provider').notNull();
-				table.string('nice_name').notNull().defaultTo('');
-				table.json('domain_names').notNull();
-				table.dateTime('expires_on').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("provider").notNull();
+				table.string("nice_name").notNull().defaultTo("");
+				table.json("domain_names").notNull();
+				table.dateTime("expires_on").notNull();
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] certificate Table created');
+			logger.info(`[${migrateName}] certificate Table created`);
 
 
-			return knex.schema.createTable('access_list_auth', (table) => {
+			return knex.schema.createTable("access_list_auth", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('access_list_id').notNull().unsigned();
-				table.string('username').notNull();
-				table.string('password').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("access_list_id").notNull().unsigned();
+				table.string("username").notNull();
+				table.string("password").notNull();
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_auth Table created');
+			logger.info(`[${migrateName}] access_list_auth Table created`);
 
 
-			return knex.schema.createTable('audit_log', (table) => {
+			return knex.schema.createTable("audit_log", (table) => {
 				table.increments().primary();
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('object_type').notNull().defaultTo('');
-				table.integer('object_id').notNull().unsigned().defaultTo(0);
-				table.string('action').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("object_type").notNull().defaultTo("");
+				table.integer("object_id").notNull().unsigned().defaultTo(0);
+				table.string("action").notNull();
+				table.json("meta").notNull();
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] audit_log Table created');
+			logger.info(`[${migrateName}] audit_log Table created`);
 		});
 		});
-
 };
 };
 
 
 /**
 /**
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };
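Note: every migration below follows the same conversion: named up/down exports, template-literal logging, and the long-deprecated knex `Promise` argument dropped (the global Promise is used directly in `down`). The shape they all share, as a sketch rather than a new file:

import { migrate as logger } from "../logger.js";

const migrateName = "example";

const up = (knex) =>
	knex.schema
		.table("example_table", (table) => {
			table.integer("example_column").notNull().unsigned().defaultTo(0);
		})
		.then(() => {
			logger.info(`[${migrateName}] example_table Table altered`);
		});

const down = (_knex) => {
	logger.warn(`[${migrateName}] You can't migrate down this one.`);
	return Promise.resolve(true);
};

export { up, down };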

+ 15 - 14
backend/migrations/20180929054513_websockets.js

@@ -1,5 +1,6 @@
-const migrate_name = 'websockets';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "websockets";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,29 +8,29 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 		});
-
 };
 };
 
 
 /**
 /**
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
-};
+};
+
+export { up, down };

+ 15 - 13
backend/migrations/20181019052346_forward_host.js

@@ -1,5 +1,6 @@
-const migrate_name = 'forward_host';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_host";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.renameColumn('forward_ip', 'forward_host');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.renameColumn("forward_ip", "forward_host");
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
-};
+};
+
+export { up, down };

+ 19 - 18
backend/migrations/20181113041458_http2_support.js

@@ -1,5 +1,6 @@
-const migrate_name = 'http2_support';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "http2_support";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,31 +8,31 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
 
 
+export { up, down };

+ 14 - 12
backend/migrations/20181213013211_forward_scheme.js

@@ -1,5 +1,6 @@
-const migrate_name = 'forward_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_scheme";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.string('forward_scheme').notNull().defaultTo('http');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.string("forward_scheme").notNull().defaultTo("http");
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 23 - 21
backend/migrations/20190104035154_disabled.js

@@ -1,5 +1,6 @@
-const migrate_name = 'disabled';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "disabled";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,38 +8,38 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 
 
-			return knex.schema.table('stream', function (stream) {
-				stream.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("stream", (stream) => {
+				stream.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table altered');
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 		});
 };
 };
 
 
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 14 - 12
backend/migrations/20190215115310_customlocations.js

@@ -1,5 +1,6 @@
-const migrate_name = 'custom_locations';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "custom_locations";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -8,17 +9,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.json('locations');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.json("locations");
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 23 - 21
backend/migrations/20190218060101_hsts.js

@@ -1,5 +1,6 @@
-const migrate_name = 'hsts';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "hsts";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,34 +8,34 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-		proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+			proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 10 - 9
backend/migrations/20190227065017_settings.js

@@ -1,5 +1,6 @@
-const migrate_name = 'settings';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "settings";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,11 +8,10 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
 	return knex.schema.createTable('setting', (table) => {
 	return knex.schema.createTable('setting', (table) => {
 		table.string('id').notNull().primary();
 		table.string('id').notNull().primary();
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
 		table.json('meta').notNull();
 		table.json('meta').notNull();
 	})
 	})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] setting Table created');
+			logger.info(`[${migrateName}] setting Table created`);
 		});
 		});
 };
 };
 
 
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 27 - 28
backend/migrations/20200410143839_access_list_client.js

@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,32 +8,30 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.createTable('access_list_client', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('access_list_id').notNull().unsigned();
-		table.string('address').notNull();
-		table.string('directive').notNull();
-		table.json('meta').notNull();
-
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] access_list_client Table created');
+	return knex.schema
+		.createTable("access_list_client", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("access_list_id").notNull().unsigned();
+			table.string("address").notNull();
+			table.string("directive").notNull();
+			table.json("meta").notNull();
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] access_list_client Table created`);
 
 
-			return knex.schema.table('access_list', function (access_list) {
-				access_list.integer('satify_any').notNull().defaultTo(0);
+			return knex.schema.table("access_list", (access_list) => {
+				access_list.integer("satify_any").notNull().defaultTo(0);
 			});
 			});
 		})
 		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 		});
 };
 };
 
 
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param {Object} knex
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.dropTable('access_list_client')
-		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_client Table dropped');
-		});
+	return knex.schema.dropTable("access_list_client").then(() => {
+		logger.info(`[${migrateName}] access_list_client Table dropped`);
+	});
 };
 };
+
+export { up, down };

+ 14 - 12
backend/migrations/20200410143840_access_list_client_fix.js

@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client_fix';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client_fix";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.renameColumn('satify_any', 'satisfy_any');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.renameColumn("satify_any", "satisfy_any");
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 		});
 };
 };
 
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 	return Promise.resolve(true);
 };
 };
+
+export { up, down };

+ 19 - 17
backend/migrations/20201014143841_pass_auth.js

@@ -1,5 +1,6 @@
-const migrate_name = 'pass_auth';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "pass_auth";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object}  knex
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.integer('pass_auth').notNull().defaultTo(1);
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.integer("pass_auth").notNull().defaultTo(1);
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 		});
 };
 };
 
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param {Object} knex
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.dropColumn('pass_auth');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.dropColumn("pass_auth");
+		})
 		.then(() => {
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
+			logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
 		});
 		});
 };
 };
+
+export { up, down };

+ 21 - 19
backend/migrations/20210210154702_redirection_scheme.js

@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_scheme";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object} knex
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.string('forward_scheme').notNull().defaultTo('$scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.string("forward_scheme").notNull().defaultTo("$scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object} knex
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 		});
 };
 };
+
+export { up, down };

+ 21 - 19
backend/migrations/20210210154703_redirection_status_code.js

@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_status_code';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_status_code";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  * @see http://knexjs.org/#Schema
  *
  *
  * @param   {Object} knex
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 		});
 };
 };
 
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  * Undo Migrate
  *
  *
  * @param   {Object} knex
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_http_code');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_http_code");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 		});
 };
 };
+
+export { up, down };

+ 33 - 30
backend/migrations/20210423103500_stream_domain.js

@@ -1,40 +1,43 @@
-const migrate_name = 'stream_domain';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_domain";
 
 
 /**
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forward_ip', 'forwarding_host');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forward_ip", "forwarding_host");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 		});
 };
 };
 
 
 /**
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forwarding_host', 'forward_ip');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forwarding_host", "forward_ip");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 		});
 };
 };
+
+export { up, down };

+ 27 - 25
backend/migrations/20211108145214_regenerate_default_host.js

@@ -1,17 +1,19 @@
-const migrate_name  = 'stream_domain';
-const logger        = require('../logger').migrate;
-const internalNginx = require('../internal/nginx');
+import internalNginx from "../internal/nginx.js";
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_domain";
 
 
 async function regenerateDefaultHost(knex) {
 async function regenerateDefaultHost(knex) {
-	const row = await knex('setting').select('*').where('id', 'default-site').first();
+	const row = await knex("setting").select("*").where("id", "default-site").first();
 
 
 	if (!row) {
 	if (!row) {
 		return Promise.resolve();
 		return Promise.resolve();
 	}
 	}
 
 
-	return internalNginx.deleteConfig('default')
+	return internalNginx
+		.deleteConfig("default")
 		.then(() => {
 		.then(() => {
-			return internalNginx.generateConfig('default', row);
+			return internalNginx.generateConfig("default", row);
 		})
 		})
 		.then(() => {
 		.then(() => {
 			return internalNginx.test();
 			return internalNginx.test();
@@ -22,29 +24,29 @@ async function regenerateDefaultHost(knex) {
 }
 }
 
 
 /**
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
 	return regenerateDefaultHost(knex);
 	return regenerateDefaultHost(knex);
 };
 };
 
 
 /**
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
 	return regenerateDefaultHost(knex);
 	return regenerateDefaultHost(knex);
-};
+};
+
+export { up, down };

+ 21 - 16
backend/migrations/20240427161436_stream_ssl.js

@@ -1,5 +1,6 @@
-const migrate_name = 'stream_ssl';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_ssl";
 
 
 /**
 /**
  * Migrate
  * Migrate
@@ -9,14 +10,15 @@ const logger       = require('../logger').migrate;
  * @param   {Object} knex
  * @param   {Object} knex
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 
-	return knex.schema.table('stream', (table) => {
-		table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 		});
 };
 };
 
 
@@ -26,13 +28,16 @@ exports.up = function (knex) {
  * @param   {Object} knex
  * @param   {Object} knex
  * @returns {Promise}
  * @returns {Promise}
  */
  */
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 
-	return knex.schema.table('stream', (table) => {
-		table.dropColumn('certificate_id');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.dropColumn("certificate_id");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 		});
 };
 };
+
+export { up, down };

+ 51 - 56
backend/models/access_list.js

@@ -1,103 +1,98 @@
 // Objection Docs:
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 // http://vincit.github.io/objection.js/
 
 
-const db               = require('../db');
-const helpers          = require('../lib/helpers');
-const Model            = require('objection').Model;
-const User             = require('./user');
-const AccessListAuth   = require('./access_list_auth');
-const AccessListClient = require('./access_list_client');
-const now              = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessListAuth from "./access_list_auth.js";
+import AccessListClient from "./access_list_client.js";
+import now from "./now_helper.js";
+import ProxyHostModel from "./proxy_host.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-	'satisfy_any',
-	'pass_auth',
-];
+const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
 
 
 class AccessList extends Model {
 class AccessList extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'AccessList';
+	static get name() {
+		return "AccessList";
 	}
 	}
 
 
-	static get tableName () {
-		return 'access_list';
+	static get tableName() {
+		return "access_list";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
-		const ProxyHost = require('./proxy_host');
-
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'access_list.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "access_list.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			items: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListAuth,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_auth.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_auth.access_list_id",
+				},
 			},
 			clients: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListClient,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_client.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_client.access_list_id",
+				},
 			},
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'access_list.id',
-					to:   'proxy_host.access_list_id'
+				relation: Model.HasManyRelation,
+				modelClass: ProxyHostModel,
+				join: {
+					from: "access_list.id",
+					to: "proxy_host.access_list_id",
+				},
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
-			}
+			},
 		};
 	}
 }
 
 
-module.exports = AccessList;
+export default AccessList;
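
The rewritten model now imports `ProxyHostModel` at the top instead of lazily `require`-ing it inside `relationMappings`, and the `owner`/`proxy_hosts` relations filter out soft-deleted rows via `modify`. A hedged usage sketch — `findById`, `allowGraph` and `withGraphFetched` are standard Objection 3 APIs, and the helper below is illustrative, not part of this commit:

```js
import AccessList from "./models/access_list.js";

// Illustrative: load one access list together with the relations declared above;
// soft-deleted owners and proxy hosts are already excluded by the mappings.
const getAccessListGraph = (id) =>
	AccessList.query()
		.findById(id)
		.allowGraph("[owner, items, clients, proxy_hosts]")
		.withGraphFetched("[owner, items, clients, proxy_hosts]");
```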

+ 25 - 24
backend/models/access_list_auth.js

@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
 class AccessListAuth extends Model {
 class AccessListAuth extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
-	static get name () {
-		return 'AccessListAuth';
+	static get name() {
+		return "AccessListAuth";
 	}
 	}
 
 
-	static get tableName () {
-		return 'access_list_auth';
+	static get tableName() {
+		return "access_list_auth";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			access_list: {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_auth.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_auth.access_list_id",
+					to: "access_list.id",
 				},
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = AccessListAuth;
+export default AccessListAuth;

+ 25 - 24
backend/models/access_list_client.js

@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
 class AccessListClient extends Model {
 class AccessListClient extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
-	static get name () {
-		return 'AccessListClient';
+	static get name() {
+		return "AccessListClient";
 	}
 	}
 
 
-	static get tableName () {
-		return 'access_list_client';
+	static get tableName() {
+		return "access_list_client";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			access_list: {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_client.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_client.access_list_id",
+					to: "access_list.id",
 				},
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = AccessListClient;
+export default AccessListClient;

+ 22 - 22
backend/models/audit-log.js

@@ -1,52 +1,52 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const User  = require('./user');
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
 class AuditLog extends Model {
 class AuditLog extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
-	static get name () {
-		return 'AuditLog';
+	static get name() {
+		return "AuditLog";
 	}
 	}
 
 
-	static get tableName () {
-		return 'audit_log';
+	static get tableName() {
+		return "audit_log";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			user: {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'audit_log.user_id',
-					to:   'user.id'
-				}
-			}
+				join: {
+					from: "audit_log.user_id",
+					to: "user.id",
+				},
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = AuditLog;
+export default AuditLog;

+ 36 - 42
backend/models/auth.js

@@ -1,59 +1,53 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const bcrypt  = require('bcrypt');
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const User    = require('./user');
-const now     = require('./now_helper');
+import bcrypt from "bcrypt";
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
 
-function encryptPassword () {
-	/* jshint -W040 */
-	let _this = this;
-
-	if (_this.type === 'password' && _this.secret) {
-		return bcrypt.hash(_this.secret, 13)
-			.then(function (hash) {
-				_this.secret = hash;
-			});
+function encryptPassword() {
+	if (this.type === "password" && this.secret) {
+		return bcrypt.hash(this.secret, 13).then((hash) => {
+			this.secret = hash;
+		});
 	}
 	}
 
 
 	return null;
 	return null;
 }
 }
 
 
 class Auth extends Model {
 class Auth extends Model {
-	$beforeInsert (queryContext) {
-		this.created_on  = now();
+	$beforeInsert(queryContext) {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 
 
 		return encryptPassword.apply(this, queryContext);
 		return encryptPassword.apply(this, queryContext);
 	}
 	}
 
 
-	$beforeUpdate (queryContext) {
+	$beforeUpdate(queryContext) {
 		this.modified_on = now();
 		this.modified_on = now();
 		return encryptPassword.apply(this, queryContext);
 		return encryptPassword.apply(this, queryContext);
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
 	/**
 	/**
@@ -62,37 +56,37 @@ class Auth extends Model {
 	 * @param {String} password
 	 * @param {String} password
 	 * @returns {Promise}
 	 * @returns {Promise}
 	 */
 	 */
-	verifyPassword (password) {
+	verifyPassword(password) {
 		return bcrypt.compare(password, this.secret);
 		return bcrypt.compare(password, this.secret);
 	}
 	}
 
 
-	static get name () {
-		return 'Auth';
+	static get name() {
+		return "Auth";
 	}
 	}
 
 
-	static get tableName () {
-		return 'auth';
+	static get tableName() {
+		return "auth";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			user: {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'auth.user_id',
-					to:   'user.id'
+				join: {
+					from: "auth.user_id",
+					to: "user.id",
 				},
 				},
 				filter: {
 				filter: {
-					is_deleted: 0
-				}
-			}
+					is_deleted: 0,
+				},
+			},
 		};
 	}
 }
 
 
-module.exports = Auth;
+export default Auth;
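
The `encryptPassword` hook stays a regular function so `this` is still the model instance when called through `$beforeInsert`/`$beforeUpdate`; only its internals were tidied. For context, a hypothetical login check built on the hooks above — the table and column names come from the model, everything else is illustrative:

```js
import Auth from "./models/auth.js";

// Hypothetical check: `secret` was bcrypt-hashed by the insert/update hooks,
// so verifyPassword() compares a plaintext candidate against the stored hash.
const checkPassword = async (userId, candidate) => {
	const auth = await Auth.query()
		.where("user_id", userId)
		.andWhere("type", "password")
		.first();
	return auth ? auth.verifyPassword(candidate) : false;
};
```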

+ 73 - 64
backend/models/certificate.js

@@ -1,124 +1,133 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const now     = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import deadHostModel from "./dead_host.js";
+import now from "./now_helper.js";
+import proxyHostModel from "./proxy_host.js";
+import redirectionHostModel from "./redirection_host.js";
+import streamModel from "./stream.js";
+import userModel from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
 
 class Certificate extends Model {
 class Certificate extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for expires_on
 		// Default for expires_on
-		if (typeof this.expires_on === 'undefined') {
+		if (typeof this.expires_on === "undefined") {
 			this.expires_on = now();
 			this.expires_on = now();
 		}
 		}
 
 
 		// Default for domain_names
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 			this.domain_names = [];
 		}
 		}
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 
 
 		this.domain_names.sort();
 		this.domain_names.sort();
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Sort domain_names
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 			this.domain_names.sort();
 		}
 		}
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'Certificate';
+	static get name() {
+		return "Certificate";
 	}
 	}
 
 
-	static get tableName () {
-		return 'certificate';
+	static get tableName() {
+		return "certificate";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 	}
 
 
-	static get relationMappings () {
-		const ProxyHost       = require('./proxy_host');
-		const DeadHost        = require('./dead_host');
-		const User            = require('./user');
-		const RedirectionHost = require('./redirection_host');
-
+	static get relationMappings() {
 		return {
 		return {
 			owner: {
 			owner: {
-				relation:   Model.HasOneRelation,
-				modelClass: User,
-				join:       {
-					from: 'certificate.owner_user_id',
-					to:   'user.id'
+				relation: Model.HasOneRelation,
+				modelClass: userModel,
+				join: {
+					from: "certificate.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			},
 			proxy_hosts: {
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'proxy_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: proxyHostModel,
+				join: {
+					from: "certificate.id",
+					to: "proxy_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
 			},
 			},
 			dead_hosts: {
 			dead_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: DeadHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'dead_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: deadHostModel,
+				join: {
+					from: "certificate.id",
+					to: "dead_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("dead_host.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('dead_host.is_deleted', 0);
-				}
 			},
 			},
 			redirection_hosts: {
 			redirection_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: RedirectionHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'redirection_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: redirectionHostModel,
+				join: {
+					from: "certificate.id",
+					to: "redirection_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("redirection_host.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('redirection_host.is_deleted', 0);
-				}
-			}
+			},
+			streams: {
+				relation: Model.HasManyRelation,
+				modelClass: streamModel,
+				join: {
+					from: "certificate.id",
+					to: "stream.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("stream.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
 
-module.exports = Certificate;
+export default Certificate;
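
Besides the mechanical reformat, this model gains a `streams` relation to pair with the `stream.certificate_id` column added by the `20240427161436_stream_ssl` migration earlier in this diff. A hypothetical usage sketch (the relation names are the ones declared above; the helper itself is not part of the commit):

```js
import Certificate from "./models/certificate.js";

// Hypothetical: fetch everything that still references a certificate,
// e.g. to warn before deleting it.
const getCertificateUsage = async (id) => {
	const cert = await Certificate.query()
		.findById(id)
		.withGraphFetched("[proxy_hosts, redirection_hosts, dead_hosts, streams]");
	if (!cert) return null;
	return {
		proxy_hosts: cert.proxy_hosts.length,
		redirection_hosts: cert.redirection_hosts.length,
		dead_hosts: cert.dead_hosts.length,
		streams: cert.streams.length,
	};
};
```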

+ 40 - 47
backend/models/dead_host.js

@@ -1,99 +1,92 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
-];
+const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
 
 
 class DeadHost extends Model {
 class DeadHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for domain_names
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 			this.domain_names = [];
 		}
 		}
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 
 
 		this.domain_names.sort();
 		this.domain_names.sort();
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Sort domain_names
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 			this.domain_names.sort();
 		}
 		}
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'DeadHost';
+	static get name() {
+		return "DeadHost";
 	}
 	}
 
 
-	static get tableName () {
-		return 'dead_host';
+	static get tableName() {
+		return "dead_host";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			owner: {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'dead_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "dead_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			},
 			certificate: {
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
 				modelClass: Certificate,
-				join:       {
-					from: 'dead_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "dead_host.certificate_id",
+					to: "certificate.id",
+				},
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = DeadHost;
+export default DeadHost;

+ 6 - 7
backend/models/now_helper.js

@@ -1,13 +1,12 @@
-const db     = require('../db');
-const config = require('../lib/config');
-const Model  = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
+import { isSqlite } from "../lib/config.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-module.exports = function () {
-	if (config.isSqlite()) {
-		// eslint-disable-next-line
+export default () => {
+	if (isSqlite()) {
 		return Model.raw("datetime('now','localtime')");
 	}
-	return Model.raw('NOW()');
+	return Model.raw("NOW()");
 };
 };
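
The helper keeps its behaviour — it returns a raw SQL fragment chosen per database driver — but now pulls `isSqlite` in as a named import. A small illustrative use outside the models (paths and the `user` table are assumptions for the sketch; the models themselves just assign `now()` in their hooks):

```js
import db from "./db.js";
import now from "./models/now_helper.js";

// Illustrative only: the raw fragment is rendered by the database itself —
// datetime('now','localtime') on SQLite, NOW() on MySQL/Postgres.
const touchUser = (id) => db("user").where("id", id).update({ modified_on: now() });
```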

+ 56 - 56
backend/models/proxy_host.js

@@ -1,114 +1,114 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const AccessList  = require('./access_list');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessList from "./access_list.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
 const boolFields = [
 const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'caching_enabled',
-	'block_exploits',
-	'allow_websocket_upgrade',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
+	"is_deleted",
+	"ssl_forced",
+	"caching_enabled",
+	"block_exploits",
+	"allow_websocket_upgrade",
+	"http2_support",
+	"enabled",
+	"hsts_enabled",
+	"hsts_subdomains",
 ];
 ];
 
 
 class ProxyHost extends Model {
 class ProxyHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for domain_names
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 			this.domain_names = [];
 		}
 		}
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 
 
 		this.domain_names.sort();
 		this.domain_names.sort();
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Sort domain_names
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 			this.domain_names.sort();
 		}
 		}
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'ProxyHost';
+	static get name() {
+		return "ProxyHost";
 	}
 	}
 
 
-	static get tableName () {
-		return 'proxy_host';
+	static get tableName() {
+		return "proxy_host";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta', 'locations'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta", "locations"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			owner: {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'proxy_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "proxy_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			},
 			access_list: {
 			access_list: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: AccessList,
 				modelClass: AccessList,
-				join:       {
-					from: 'proxy_host.access_list_id',
-					to:   'access_list.id'
+				join: {
+					from: "proxy_host.access_list_id",
+					to: "access_list.id",
+				},
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
 			},
 			},
 			certificate: {
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
 				modelClass: Certificate,
-				join:       {
-					from: 'proxy_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "proxy_host.certificate_id",
+					to: "certificate.id",
 				},
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = ProxyHost;
+export default ProxyHost;

+ 47 - 48
backend/models/redirection_host.js

@@ -1,102 +1,101 @@
-
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
 const boolFields = [
 const boolFields = [
-	'is_deleted',
-	'enabled',
-	'preserve_path',
-	'ssl_forced',
-	'block_exploits',
-	'hsts_enabled',
-	'hsts_subdomains',
-	'http2_support',
+	"is_deleted",
+	"enabled",
+	"preserve_path",
+	"ssl_forced",
+	"block_exploits",
+	"hsts_enabled",
+	"hsts_subdomains",
+	"http2_support",
 ];
 ];
 
 
 class RedirectionHost extends Model {
 class RedirectionHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for domain_names
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 			this.domain_names = [];
 		}
 		}
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 
 
 		this.domain_names.sort();
 		this.domain_names.sort();
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Sort domain_names
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 			this.domain_names.sort();
 		}
 		}
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'RedirectionHost';
+	static get name() {
+		return "RedirectionHost";
 	}
 	}
 
 
-	static get tableName () {
-		return 'redirection_host';
+	static get tableName() {
+		return "redirection_host";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			owner: {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'redirection_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "redirection_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			},
 			certificate: {
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
 				modelClass: Certificate,
-				join:       {
-					from: 'redirection_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "redirection_host.certificate_id",
+					to: "certificate.id",
 				},
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = RedirectionHost;
+export default RedirectionHost;

+ 3 - 3
backend/models/setting.js

@@ -1,8 +1,8 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db    = require('../db');
-const Model = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
@@ -27,4 +27,4 @@ class Setting extends Model {
 	}
 	}
 }
 }
 
 
-module.exports = Setting;
+export default Setting;

+ 38 - 43
backend/models/stream.js

@@ -1,82 +1,77 @@
-const Model       = require('objection').Model;
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-	'enabled',
-	'tcp_forwarding',
-	'udp_forwarding',
-];
+const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
 
 
 class Stream extends Model {
 class Stream extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for meta
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 			this.meta = {};
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'Stream';
+	static get name() {
+		return "Stream";
 	}
 	}
 
 
-	static get tableName () {
-		return 'stream';
+	static get tableName() {
+		return "stream";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			owner: {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
 				modelClass: User,
-				join:       {
-					from: 'stream.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "stream.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			},
 			certificate: {
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
 				modelClass: Certificate,
-				join:       {
-					from: 'stream.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "stream.certificate_id",
+					to: "certificate.id",
+				},
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
 				},
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+			},
 		};
 		};
 	}
 	}
 }
 }
 
 
-module.exports = Stream;
+export default Stream;

+ 59 - 58
backend/models/token.js

@@ -3,17 +3,17 @@
  and then has abilities after that.
  and then has abilities after that.
  */
  */
 
 
-const _      = require('lodash');
-const jwt    = require('jsonwebtoken');
-const crypto = require('crypto');
-const config = require('../lib/config');
-const error  = require('../lib/error');
-const logger = require('../logger').global;
-const ALGO   = 'RS256';
+import crypto from "node:crypto";
+import jwt from "jsonwebtoken";
+import _ from "lodash";
+import { getPrivateKey, getPublicKey } from "../lib/config.js";
+import errs from "../lib/error.js";
+import { global as logger } from "../logger.js";
 
 
-module.exports = function () {
+const ALGO = "RS256";
 
 
-	let token_data = {};
+export default () => {
+	let tokenData = {};
 
 
 	const self = {
 	const self = {
 		/**
 		/**
@@ -21,28 +21,26 @@ module.exports = function () {
 		 * @returns {Promise}
 		 * @returns {Promise}
 		 */
 		 */
 		create: (payload) => {
 		create: (payload) => {
-			if (!config.getPrivateKey()) {
-				logger.error('Private key is empty!');
+			if (!getPrivateKey()) {
+				logger.error("Private key is empty!");
 			}
 			}
 			// sign with RSA SHA256
 			// sign with RSA SHA256
 			const options = {
 			const options = {
 				algorithm: ALGO,
 				algorithm: ALGO,
-				expiresIn: payload.expiresIn || '1d'
+				expiresIn: payload.expiresIn || "1d",
 			};
 			};
 
 
-			payload.jti = crypto.randomBytes(12)
-				.toString('base64')
-				.substring(-8);
+			payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
 
 
 			return new Promise((resolve, reject) => {
 			return new Promise((resolve, reject) => {
-				jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
+				jwt.sign(payload, getPrivateKey(), options, (err, token) => {
 					if (err) {
 					if (err) {
 						reject(err);
 						reject(err);
 					} else {
 					} else {
-						token_data = payload;
+						tokenData = payload;
 						resolve({
 						resolve({
-							token:   token,
-							payload: payload
+							token: token,
+							payload: payload,
 						});
 						});
 					}
 					}
 				});
 				});
@@ -53,42 +51,47 @@ module.exports = function () {
 		 * @param {String} token
 		 * @param {String} token
 		 * @returns {Promise}
 		 * @returns {Promise}
 		 */
 		 */
-		load: function (token) {
-			if (!config.getPublicKey()) {
-				logger.error('Public key is empty!');
+		load: (token) => {
+			if (!getPublicKey()) {
+				logger.error("Public key is empty!");
 			}
 			}
 			return new Promise((resolve, reject) => {
 			return new Promise((resolve, reject) => {
 				try {
 				try {
-					if (!token || token === null || token === 'null') {
-						reject(new error.AuthError('Empty token'));
+					if (!token || token === null || token === "null") {
+						reject(new errs.AuthError("Empty token"));
 					} else {
 					} else {
-						jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
-							if (err) {
-
-								if (err.name === 'TokenExpiredError') {
-									reject(new error.AuthError('Token has expired', err));
+						jwt.verify(
+							token,
+							getPublicKey(),
+							{ ignoreExpiration: false, algorithms: [ALGO] },
+							(err, result) => {
+								if (err) {
+									if (err.name === "TokenExpiredError") {
+										reject(new errs.AuthError("Token has expired", err));
+									} else {
+										reject(err);
+									}
 								} else {
 								} else {
-									reject(err);
+									tokenData = result;
+
+									// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
+									// For 30 days at least, we need to replace 'all' with user.
+									if (
+										typeof tokenData.scope !== "undefined" &&
+										_.indexOf(tokenData.scope, "all") !== -1
+									) {
+										tokenData.scope = ["user"];
+									}
+
+									resolve(tokenData);
 								}
 								}
-
-							} else {
-								token_data = result;
-
-								// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
-								// For 30 days at least, we need to replace 'all' with user.
-								if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
-									token_data.scope = ['user'];
-								}
-
-								resolve(token_data);
-							}
-						});
+							},
+						);
 					}
 					}
 				} catch (err) {
 				} catch (err) {
 					reject(err);
 					reject(err);
 				}
 				}
 			});
 			});
-
 		},
 		},
 
 
 		/**
 		/**
@@ -97,17 +100,15 @@ module.exports = function () {
 		 * @param   {String}  scope
 		 * @param   {String}  scope
 		 * @returns {Boolean}
 		 * @returns {Boolean}
 		 */
 		 */
-		hasScope: function (scope) {
-			return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
-		},
+		hasScope: (scope) => typeof tokenData.scope !== "undefined" && _.indexOf(tokenData.scope, scope) !== -1,
 
 
 		/**
 		/**
 		 * @param  {String}  key
 		 * @param  {String}  key
 		 * @return {*}
 		 * @return {*}
 		 */
 		 */
-		get: function (key) {
-			if (typeof token_data[key] !== 'undefined') {
-				return token_data[key];
+		get: (key) => {
+			if (typeof tokenData[key] !== "undefined") {
+				return tokenData[key];
 			}
 			}
 
 
 			return null;
 			return null;
@@ -117,22 +118,22 @@ module.exports = function () {
 		 * @param  {String}  key
 		 * @param  {String}  key
 		 * @param  {*}       value
 		 * @param  {*}       value
 		 */
 		 */
-		set: function (key, value) {
-			token_data[key] = value;
+		set: (key, value) => {
+			tokenData[key] = value;
 		},
 		},
 
 
 		/**
 		/**
-		 * @param   [default_value]
+		 * @param   [defaultValue]
 		 * @returns {Integer}
 		 * @returns {Integer}
 		 */
 		 */
-		getUserId: (default_value) => {
-			const attrs = self.get('attrs');
-			if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
+		getUserId: (defaultValue) => {
+			const attrs = self.get("attrs");
+			if (attrs?.id) {
 				return attrs.id;
 				return attrs.id;
 			}
 			}
 
 
-			return default_value || 0;
-		}
+			return defaultValue || 0;
+		},
 	};
 
 
 	return self;
 	return self;
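
The token factory keeps the same surface (`create`, `load`, `hasScope`, `get`, `set`, `getUserId`) while renaming `token_data` to `tokenData` and switching to the named config getters. A hedged usage sketch — the payload fields are illustrative, and the RSA key material comes from lib/config.js in the real code:

```js
import TokenModel from "./models/token.js";

const demo = async () => {
	const token = TokenModel();

	// Sign an RS256 JWT; expiresIn falls back to "1d" when not supplied.
	const { token: signed } = await token.create({ scope: ["user"], attrs: { id: 1 } });

	// Verify signature and expiry, stashing the decoded payload internally.
	await token.load(signed);

	token.hasScope("user"); // true
	token.getUserId();      // 1 (from attrs.id), or the supplied default / 0
};
```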

+ 28 - 32
backend/models/user.js

@@ -1,69 +1,65 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db             = require('../db');
-const helpers        = require('../lib/helpers');
-const Model          = require('objection').Model;
-const UserPermission = require('./user_permission');
-const now            = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import UserPermission from "./user_permission.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
-const boolFields = [
-	'is_deleted',
-	'is_disabled',
-];
+const boolFields = ["is_deleted", "is_disabled"];
 
 
 class User extends Model {
 class User extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 		this.modified_on = now();
 
 
 		// Default for roles
 		// Default for roles
-		if (typeof this.roles === 'undefined') {
+		if (typeof this.roles === "undefined") {
 			this.roles = [];
 			this.roles = [];
 		}
 		}
 	}
 	}
 
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 		this.modified_on = now();
 	}
 	}
 
 
 	$parseDatabaseJson(json) {
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 	}
 
 
 	$formatDatabaseJson(json) {
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 	}
 
 
-	static get name () {
-		return 'User';
+	static get name() {
+		return "User";
 	}
 	}
 
 
-	static get tableName () {
-		return 'user';
+	static get tableName() {
+		return "user";
 	}
 	}
 
 
-	static get jsonAttributes () {
-		return ['roles'];
+	static get jsonAttributes() {
+		return ["roles"];
 	}
 	}
 
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 		return {
 			permissions: {
 			permissions: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: UserPermission,
 				modelClass: UserPermission,
-				join:       {
-					from: 'user.id',
-					to:   'user_permission.user_id'
-				}
-			}
+				join: {
+					from: "user.id",
+					to: "user_permission.user_id",
+				},
+			},
 		};
 		};
 	}
 	}
-
 }
 }
 
 
-module.exports = User;
+export default User;

+ 4 - 4
backend/models/user_permission.js

@@ -1,9 +1,9 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
 
 
 Model.knex(db);
 Model.knex(db);
 
 
@@ -26,4 +26,4 @@ class UserPermission extends Model {
 	}
 	}
 }
 }
 
 
-module.exports = UserPermission;
+export default UserPermission;

+ 1 - 1
backend/nodemon.json

@@ -3,5 +3,5 @@
   "ignore": [
   "ignore": [
     "data"
     "data"
   ],
   ],
-  "ext": "js json ejs"
+  "ext": "js json ejs cjs"
 }
 }

+ 19 - 18
backend/package.json

@@ -1,8 +1,16 @@
 {
 {
 	"name": "nginx-proxy-manager",
 	"name": "nginx-proxy-manager",
-	"version": "0.0.0",
+	"version": "2.0.0",
 	"description": "A beautiful interface for creating Nginx endpoints",
 	"description": "A beautiful interface for creating Nginx endpoints",
+	"author": "Jamie Curnow <[email protected]>",
+	"license": "MIT",
 	"main": "index.js",
 	"main": "index.js",
+	"type": "module",
+	"scripts": {
+		"lint": "biome lint",
+		"prettier": "biome format --write .",
+		"validate-schema": "node validate-schema.js"
+	},
 	"dependencies": {
 	"dependencies": {
 		"@apidevtools/json-schema-ref-parser": "^11.7.0",
 		"@apidevtools/json-schema-ref-parser": "^11.7.0",
 		"ajv": "^8.17.1",
 		"ajv": "^8.17.1",
@@ -18,31 +26,24 @@
 		"knex": "2.4.2",
 		"knex": "2.4.2",
 		"liquidjs": "10.6.1",
 		"liquidjs": "10.6.1",
 		"lodash": "^4.17.21",
 		"lodash": "^4.17.21",
-		"moment": "^2.29.4",
-		"mysql2": "^3.11.1",
-		"node-rsa": "^1.0.8",
+		"moment": "^2.30.1",
+		"mysql2": "^3.15.3",
+		"node-rsa": "^1.1.1",
 		"objection": "3.0.1",
 		"objection": "3.0.1",
 		"path": "^0.12.7",
 		"path": "^0.12.7",
-		"pg": "^8.13.1",
+		"pg": "^8.16.3",
 		"signale": "1.4.0",
 		"signale": "1.4.0",
-		"sqlite3": "5.1.6",
+		"sqlite3": "^5.1.7",
 		"temp-write": "^4.0.0"
 		"temp-write": "^4.0.0"
 	},
 	},
-	"signale": {
-		"displayDate": true,
-		"displayTimestamp": true
-	},
-	"author": "Jamie Curnow <[email protected]>",
-	"license": "MIT",
 	"devDependencies": {
 	"devDependencies": {
 		"@apidevtools/swagger-parser": "^10.1.0",
 		"@apidevtools/swagger-parser": "^10.1.0",
+		"@biomejs/biome": "^2.3.1",
 		"chalk": "4.1.2",
 		"chalk": "4.1.2",
-		"eslint": "^8.36.0",
-		"eslint-plugin-align-assignments": "^1.1.2",
-		"nodemon": "^2.0.2",
-		"prettier": "^2.0.4"
+		"nodemon": "^2.0.2"
 	},
 	},
-	"scripts": {
-		"validate-schema": "node validate-schema.js"
+	"signale": {
+		"displayDate": true,
+		"displayTimestamp": true
 	}
 }

+ 86 - 31
backend/routes/audit-log.js

@@ -1,19 +1,20 @@
-const express          = require('express');
-const validator        = require('../lib/validator');
-const jwtdecode        = require('../lib/express/jwt-decode');
-const internalAuditLog = require('../internal/audit-log');
+import express from "express";
+import internalAuditLog from "../internal/audit-log.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
 
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 });
 
 
 /**
 /**
  * /api/audit-log
  * /api/audit-log
  */
  */
 router
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -24,29 +25,83 @@ router
 	 *
 	 *
 	 * Retrieve all logs
 	 * Retrieve all logs
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalAuditLog.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * Specific audit log entry
+ *
+ * /api/audit-log/123
+ */
+router
+	.route("/:event_id")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/audit-log/123
+	 *
+	 * Retrieve a specific entry
+	 */
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["event_id"],
+					additionalProperties: false,
+					properties: {
+						event_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					event_id: req.params.event_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+
+			const item = await internalAuditLog.get(res.locals.access, {
+				id: data.event_id,
+				expand: data.expand,
+			});
+			res.status(200).send(item);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 
-module.exports = router;
+export default router;
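
The rewrite converts the list endpoint to async/await with explicit try/catch logging and adds a single-entry endpoint at `/api/audit-log/:event_id`. An illustrative client call against the new route — the base URL and token are placeholders, and bearer-token auth is assumed from the `jwtdecode()` middleware:

```js
// Placeholder values — adjust host/port/token for a real instance.
const apiBase = "http://localhost:81/api";

const getAuditLogEntry = async (eventId, token) => {
	const res = await fetch(`${apiBase}/audit-log/${eventId}?expand=user`, {
		headers: { Authorization: `Bearer ${token}` },
	});
	if (!res.ok) throw new Error(`HTTP ${res.status}`);
	return res.json();
};
```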

+ 44 - 29
backend/routes/main.js

@@ -1,51 +1,66 @@
-const express = require('express');
-const pjson   = require('../package.json');
-const error   = require('../lib/error');
+import express from "express";
+import errs from "../lib/error.js";
+import pjson from "../package.json" with { type: "json" };
+import { isSetup } from "../setup.js";
+import auditLogRoutes from "./audit-log.js";
+import accessListsRoutes from "./nginx/access_lists.js";
+import certificatesHostsRoutes from "./nginx/certificates.js";
+import deadHostsRoutes from "./nginx/dead_hosts.js";
+import proxyHostsRoutes from "./nginx/proxy_hosts.js";
+import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
+import streamsRoutes from "./nginx/streams.js";
+import reportsRoutes from "./reports.js";
+import schemaRoutes from "./schema.js";
+import settingsRoutes from "./settings.js";
+import tokensRoutes from "./tokens.js";
+import usersRoutes from "./users.js";
 
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 });
 
 
 /**
 /**
  * Health Check
  * Health Check
  * GET /api
  * GET /api
  */
  */
-router.get('/', (req, res/*, next*/) => {
-	let version = pjson.version.split('-').shift().split('.');
+router.get("/", async (_, res /*, next*/) => {
+	const version = pjson.version.split("-").shift().split(".");
+	const setup = await isSetup();
 
 
 	res.status(200).send({
 	res.status(200).send({
-		status:  'OK',
+		status: "OK",
+		setup,
 		version: {
 		version: {
-			major:    parseInt(version.shift(), 10),
-			minor:    parseInt(version.shift(), 10),
-			revision: parseInt(version.shift(), 10)
-		}
+			major: Number.parseInt(version.shift(), 10),
+			minor: Number.parseInt(version.shift(), 10),
+			revision: Number.parseInt(version.shift(), 10),
+		},
 	});
 	});
 });
 });
 
 
-router.use('/schema', require('./schema'));
-router.use('/tokens', require('./tokens'));
-router.use('/users', require('./users'));
-router.use('/audit-log', require('./audit-log'));
-router.use('/reports', require('./reports'));
-router.use('/settings', require('./settings'));
-router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
-router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
-router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
-router.use('/nginx/streams', require('./nginx/streams'));
-router.use('/nginx/access-lists', require('./nginx/access_lists'));
-router.use('/nginx/certificates', require('./nginx/certificates'));
+router.use("/schema", schemaRoutes);
+router.use("/tokens", tokensRoutes);
+router.use("/users", usersRoutes);
+router.use("/audit-log", auditLogRoutes);
+router.use("/reports", reportsRoutes);
+router.use("/settings", settingsRoutes);
+router.use("/nginx/proxy-hosts", proxyHostsRoutes);
+router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
+router.use("/nginx/dead-hosts", deadHostsRoutes);
+router.use("/nginx/streams", streamsRoutes);
+router.use("/nginx/access-lists", accessListsRoutes);
+router.use("/nginx/certificates", certificatesHostsRoutes);
 
 
 /**
 /**
  * API 404 for all other routes
  * API 404 for all other routes
  *
  *
  * ALL /api/*
  * ALL /api/*
  */
  */
-router.all(/(.+)/, function (req, _, next) {
-	req.params.page = req.params['0'];
-	next(new error.ItemNotFoundError(req.params.page));
+router.all(/(.+)/, (req, _, next) => {
+	req.params.page = req.params["0"];
+	next(new errs.ItemNotFoundError(req.params.page));
 });
 
 
-module.exports = router;
+export default router;
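
The health check handler now awaits `isSetup()` and reports it next to the parsed semver, and route modules are imported statically instead of inline `require` calls. A hypothetical probe against it — the base URL is a placeholder; the field names come from the handler above, the values are only examples:

```js
// Illustrative health probe for GET /api.
const checkHealth = async () => {
	const res = await fetch("http://localhost:81/api/");
	const body = await res.json();
	// Expected shape: { status: "OK", setup: true, version: { major, minor, revision } }
	return body.status === "OK" && body.setup === true;
};
```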

+ 95 - 89
backend/routes/nginx/access_lists.js

@@ -1,22 +1,23 @@
-const express            = require('express');
-const validator          = require('../../lib/validator');
-const jwtdecode          = require('../../lib/express/jwt-decode');
-const apiValidator       = require('../../lib/validator/api');
-const internalAccessList = require('../../internal/access-list');
-const schema             = require('../../schema');
+import express from "express";
+import internalAccessList from "../../internal/access-list.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 });
 
 
 /**
 /**
  * /api/nginx/access-lists
  * /api/nginx/access-lists
  */
  */
 router
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 *
 	 * Retrieve all access-lists
 	 * Retrieve all access-lists
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
+				},
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalAccessList.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+			);
+			const rows = await internalAccessList.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	})

	/**
@@ -56,16 +59,15 @@ router
	 *
	 * Create a new access-list
	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body)
-			.then((payload) => {
-				return internalAccessList.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body);
+			const result = await internalAccessList.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

/**
@@ -74,7 +76,7 @@ router
 * /api/nginx/access-lists/123
 */
router
-	.route('/:list_id')
+	.route("/:list_id")
	.options((_, res) => {
		res.sendStatus(204);
	})
@@ -85,33 +87,35 @@ router
	 *
	 * Retrieve a specific access-list
	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['list_id'],
-			additionalProperties: false,
-			properties:           {
-				list_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["list_id"],
+					additionalProperties: false,
+					properties: {
+						list_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					list_id: req.params.list_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			list_id: req.params.list_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalAccessList.get(res.locals.access, {
-					id:     parseInt(data.list_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+			);
+			const row = await internalAccessList.get(res.locals.access, {
+				id: Number.parseInt(data.list_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	})

	/**
@@ -119,17 +123,16 @@ router
	 *
	 * Update and existing access-list
	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.list_id, 10);
-				return internalAccessList.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.list_id, 10);
+			const result = await internalAccessList.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	})

	/**
@@ -137,13 +140,16 @@ router
	 *
	 * Delete and existing access-list
	 */
-	.delete((req, res, next) => {
-		internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalAccessList.delete(res.locals.access, {
+				id: Number.parseInt(req.params.list_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

-module.exports = router;
+export default router;
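
Every route file in this commit repeats the same migrated handler shape: validate the input, await the internal module, send the result, and on failure log at debug level before handing the error to next(). A condensed sketch of that pattern, using a placeholder internalThing module and schema path rather than anything that actually exists in this repository:

// Illustrative only: "internalThing" and "/some-collection" are placeholders.
router.route("/").post(async (req, res, next) => {
	try {
		const payload = await apiValidator(getValidationSchema("/some-collection", "post"), req.body);
		const result = await internalThing.create(res.locals.access, payload);
		res.status(201).send(result);
	} catch (err) {
		// Debug-level trace; the shared error middleware still builds the API response.
		logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
		next(err);
	}
});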

+ 222 - 155
backend/routes/nginx/certificates.js

@@ -1,22 +1,24 @@
-const express             = require('express');
-const error               = require('../../lib/error');
-const validator           = require('../../lib/validator');
-const jwtdecode           = require('../../lib/express/jwt-decode');
-const apiValidator        = require('../../lib/validator/api');
-const internalCertificate = require('../../internal/certificate');
-const schema              = require('../../schema');
+import express from "express";
+import dnsPlugins from "../../certbot/dns-plugins.json" with { type: "json" };
+import internalCertificate from "../../internal/certificate.js";
+import errs from "../../lib/error.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

/**
 * /api/nginx/certificates
 */
router
-	.route('/')
+	.route("/")
	.options((_, res) => {
		res.sendStatus(204);
	})
@@ -27,29 +29,38 @@ router
 	 *
 	 *
 	 * Retrieve all certificates
 	 * Retrieve all certificates
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalCertificate.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalCertificate.getAll(
+				res.locals.access,
+				data.expand,
+				data.query,
+			);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -57,17 +68,56 @@ router
 	 *
 	 *
 	 * Create a new certificate
 	 * Create a new certificate
 	 */
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body)
-			.then((payload) => {
-				req.setTimeout(900000); // 15 minutes timeout
-				return internalCertificate.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/certificates", "post"),
+				req.body,
+			);
+			req.setTimeout(900000); // 15 minutes timeout
+			const result = await internalCertificate.create(
+				res.locals.access,
+				payload,
+			);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * /api/nginx/certificates/dns-providers
+ */
+router
+	.route("/dns-providers")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * GET /api/nginx/certificates/dns-providers
+	 *
+	 * Get list of all supported DNS providers
+	 */
+	.get(async (req, res, next) => {
+		try {
+			if (!res.locals.access.token.getUserId()) {
+				throw new errs.PermissionError("Login required");
+			}
+			const clean = Object.keys(dnsPlugins).map((key) => ({
+				id: key,
+				name: dnsPlugins[key].name,
+				credentials: dnsPlugins[key].credentials,
+			}));
+
+			clean.sort((a, b) => a.name.localeCompare(b.name));
+			res.status(200).send(clean);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -76,29 +126,68 @@ router
  * /api/nginx/certificates/test-http
  * /api/nginx/certificates/test-http
  */
  */
 router
 router
-	.route('/test-http')
+	.route("/test-http")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
 
 
 	/**
 	/**
-	 * GET /api/nginx/certificates/test-http
+	 * POST /api/nginx/certificates/test-http
 	 *
 	 *
 	 * Test HTTP challenge for domains
 	 * Test HTTP challenge for domains
 	 */
 	 */
-	.get((req, res, next) => {
-		if (req.query.domains === undefined) {
-			next(new error.ValidationError('Domains are required as query parameters'));
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/certificates/test-http", "post"),
+				req.body,
+			);
+			req.setTimeout(60000); // 1 minute timeout
+
+			const result = await internalCertificate.testHttpsChallenge(
+				res.locals.access,
+				payload,
+			);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	});
+
+/**
+ * Validate Certs before saving
+ *
+ * /api/nginx/certificates/validate
+ */
+router
+	.route("/validate")
+	.options((_, res) => {
+		res.sendStatus(204);
+	})
+	.all(jwtdecode())
+
+	/**
+	 * POST /api/nginx/certificates/validate
+	 *
+	 * Validate certificates
+	 */
+	.post(async (req, res, next) => {
+		if (!req.files) {
+			res.status(400).send({ error: "No files were uploaded" });
 			return;
 			return;
 		}
 		}
 
 
-		internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+		try {
+			const result = await internalCertificate.validate({
+				files: req.files,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -107,7 +196,7 @@ router
  * /api/nginx/certificates/123
  * /api/nginx/certificates/123
  */
  */
 router
 router
-	.route('/:certificate_id')
+	.route("/:certificate_id")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -118,33 +207,38 @@ router
 	 *
 	 *
 	 * Retrieve a specific certificate
 	 * Retrieve a specific certificate
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['certificate_id'],
-			additionalProperties: false,
-			properties:           {
-				certificate_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["certificate_id"],
+					additionalProperties: false,
+					properties: {
+						certificate_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			certificate_id: req.params.certificate_id,
-			expand:         (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalCertificate.get(res.locals.access, {
-					id:     parseInt(data.certificate_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					certificate_id: req.params.certificate_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+			const row = await internalCertificate.get(res.locals.access, {
+				id: Number.parseInt(data.certificate_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -152,13 +246,16 @@ router
 	 *
 	 *
 	 * Update and existing certificate
 	 * Update and existing certificate
 	 */
 	 */
-	.delete((req, res, next) => {
-		internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalCertificate.delete(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -167,7 +264,7 @@ router
  * /api/nginx/certificates/123/upload
  * /api/nginx/certificates/123/upload
  */
  */
 router
 router
-	.route('/:certificate_id/upload')
+	.route("/:certificate_id/upload")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -178,20 +275,21 @@ router
 	 *
 	 *
 	 * Upload certificates
 	 * Upload certificates
 	 */
 	 */
-	.post((req, res, next) => {
+	.post(async (req, res, next) => {
 		if (!req.files) {
 		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
-		} else {
-			internalCertificate.upload(res.locals.access, {
-				id:    parseInt(req.params.certificate_id, 10),
-				files: req.files
-			})
-				.then((result) => {
-					res.status(200)
-						.send(result);
-				})
-				.catch(next);
+			res.status(400).send({ error: "No files were uploaded" });
+			return;
+		}
+
+		try {
+			const result = await internalCertificate.upload(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+				files: req.files,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
 		}
 		}
 	});
 	});
 
 
@@ -201,7 +299,7 @@ router
  * /api/nginx/certificates/123/renew
  * /api/nginx/certificates/123/renew
  */
  */
 router
 router
-	.route('/:certificate_id/renew')
+	.route("/:certificate_id/renew")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -212,16 +310,17 @@ router
 	 *
 	 *
 	 * Renew certificate
 	 * Renew certificate
 	 */
 	 */
-	.post((req, res, next) => {
+	.post(async (req, res, next) => {
 		req.setTimeout(900000); // 15 minutes timeout
 		req.setTimeout(900000); // 15 minutes timeout
-		internalCertificate.renew(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+		try {
+			const result = await internalCertificate.renew(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -230,7 +329,7 @@ router
  * /api/nginx/certificates/123/download
  * /api/nginx/certificates/123/download
  */
  */
 router
 router
-	.route('/:certificate_id/download')
+	.route("/:certificate_id/download")
 	.options((_req, res) => {
 	.options((_req, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -241,48 +340,16 @@ router
 	 *
 	 *
 	 * Renew certificate
 	 * Renew certificate
 	 */
 	 */
-	.get((req, res, next) => {
-		internalCertificate.download(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
-			.then((result) => {
-				res.status(200)
-					.download(result.fileName);
-			})
-			.catch(next);
-	});
-
-/**
- * Validate Certs before saving
- *
- * /api/nginx/certificates/validate
- */
-router
-	.route('/validate')
-	.options((_, res) => {
-		res.sendStatus(204);
-	})
-	.all(jwtdecode())
-
-	/**
-	 * POST /api/nginx/certificates/validate
-	 *
-	 * Validate certificates
-	 */
-	.post((req, res, next) => {
-		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
-		} else {
-			internalCertificate.validate({
-				files: req.files
-			})
-				.then((result) => {
-					res.status(200)
-						.send(result);
-				})
-				.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const result = await internalCertificate.download(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			});
+			res.status(200).download(result.fileName);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
		}
	});

-module.exports = router;
+export default router;
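
Note the behavioural change in this file: testing the HTTP challenge is now a POST whose JSON body is validated against the /nginx/certificates/test-http schema, instead of a GET with a domains query parameter, and a new GET /nginx/certificates/dns-providers endpoint lists the supported DNS plugins. A hypothetical client call against the new test endpoint, assuming the schema still takes a domains array as the old query parameter did:

// Hypothetical request; the exact body shape is defined by the
// "/nginx/certificates/test-http" schema and is assumed here.
const res = await fetch("/api/nginx/certificates/test-http", {
	method: "POST",
	headers: {
		"Content-Type": "application/json",
		Authorization: `Bearer ${token}`, // token previously obtained from /api/tokens
	},
	body: JSON.stringify({ domains: ["example.com", "www.example.com"] }),
});
const reachability = await res.json();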

+ 116 - 106
backend/routes/nginx/dead_hosts.js

@@ -1,21 +1,22 @@
-const express          = require('express');
-const validator        = require('../../lib/validator');
-const jwtdecode        = require('../../lib/express/jwt-decode');
-const apiValidator     = require('../../lib/validator/api');
-const internalDeadHost = require('../../internal/dead-host');
-const schema           = require('../../schema');
+import express from "express";
+import internalDeadHost from "../../internal/dead-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
-let router = express.Router({
+const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

/**
 * /api/nginx/dead-hosts
 */
router
-	.route('/')
+	.route("/")
	.options((_, res) => {
		res.sendStatus(204);
	})
@@ -26,29 +27,31 @@ router
 	 *
 	 *
 	 * Retrieve all dead-hosts
 	 * Retrieve all dead-hosts
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalDeadHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 *
 	 * Create a new dead-host
 	 * Create a new dead-host
 	 */
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalDeadHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body);
+			const result = await internalDeadHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/dead-hosts/123
  * /api/nginx/dead-hosts/123
  */
  */
 router
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -85,65 +87,69 @@ router
 	 *
 	 *
 	 * Retrieve a specific dead-host
 	 * Retrieve a specific dead-host
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalDeadHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				},
+			);
+			const row = await internalDeadHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
 	 * PUT /api/nginx/dead-hosts/123
 	 * PUT /api/nginx/dead-hosts/123
 	 *
 	 *
-	 * Update and existing dead-host
+	 * Update an existing dead-host
 	 */
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalDeadHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalDeadHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
 	 * DELETE /api/nginx/dead-hosts/123
 	 * DELETE /api/nginx/dead-hosts/123
 	 *
 	 *
-	 * Update and existing dead-host
+	 * Delete a dead-host
 	 */
 	 */
-	.delete((req, res, next) => {
-		internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalDeadHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/dead-hosts/123/enable
  * /api/nginx/dead-hosts/123/enable
  */
  */
 router
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/dead-hosts/123/enable
 	 * POST /api/nginx/dead-hosts/123/enable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalDeadHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/dead-hosts/123/disable
  * /api/nginx/dead-hosts/123/disable
  */
  */
 router
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -186,12 +195,13 @@ router
 	 * POST /api/nginx/dead-hosts/123/disable
 	 * POST /api/nginx/dead-hosts/123/disable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalDeadHost.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});
 
 
-module.exports = router;
+export default router;

+ 118 - 106
backend/routes/nginx/proxy_hosts.js

@@ -1,22 +1,23 @@
-const express           = require('express');
-const validator         = require('../../lib/validator');
-const jwtdecode         = require('../../lib/express/jwt-decode');
-const apiValidator      = require('../../lib/validator/api');
-const internalProxyHost = require('../../internal/proxy-host');
-const schema            = require('../../schema');
+import express from "express";
+import internalProxyHost from "../../internal/proxy-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
-let router = express.Router({
+const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

/**
 * /api/nginx/proxy-hosts
 */
router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
		res.sendStatus(204);
	})
	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 *
 	 * Retrieve all proxy-hosts
 	 * Retrieve all proxy-hosts
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
+				},
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+			);
+			const rows = await internalProxyHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 *
 	 * Create a new proxy-host
 	 * Create a new proxy-host
 	 */
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalProxyHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body);
+			const result = await internalProxyHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err} ${JSON.stringify(err.debug, null, 2)}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/proxy-hosts/123
  * /api/nginx/proxy-hosts/123
  */
  */
 router
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -85,33 +87,35 @@ router
 	 *
 	 *
 	 * Retrieve a specific proxy-host
 	 * Retrieve a specific proxy-host
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
 				},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalProxyHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+			);
+			const row = await internalProxyHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -119,17 +123,16 @@ router
 	 *
 	 *
 	 * Update and existing proxy-host
 	 * Update and existing proxy-host
 	 */
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalProxyHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalProxyHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -137,13 +140,16 @@ router
 	 *
 	 *
 	 * Update and existing proxy-host
 	 * Update and existing proxy-host
 	 */
 	 */
-	.delete((req, res, next) => {
-		internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/proxy-hosts/123/enable
  * /api/nginx/proxy-hosts/123/enable
  */
  */
 router
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/proxy-hosts/123/enable
 	 * POST /api/nginx/proxy-hosts/123/enable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/proxy-hosts/123/disable
  * /api/nginx/proxy-hosts/123/disable
  */
  */
 router
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -185,13 +194,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/proxy-hosts/123/disable
 	 * POST /api/nginx/proxy-hosts/123/disable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalProxyHost.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

-module.exports = router;
+export default router;

+ 123 - 108
backend/routes/nginx/redirection_hosts.js

@@ -1,22 +1,23 @@
-const express                 = require('express');
-const validator               = require('../../lib/validator');
-const jwtdecode               = require('../../lib/express/jwt-decode');
-const apiValidator            = require('../../lib/validator/api');
-const internalRedirectionHost = require('../../internal/redirection-host');
-const schema                  = require('../../schema');
+import express from "express";
+import internalRedirectionHost from "../../internal/redirection-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
-let router = express.Router({
+const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

/**
 * /api/nginx/redirection-hosts
 */
router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
		res.sendStatus(204);
	})
	.all(jwtdecode())
@@ -26,29 +27,31 @@ router
 	 *
 	 *
 	 * Retrieve all redirection-hosts
 	 * Retrieve all redirection-hosts
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const rows = await internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 *
 	 * Create a new redirection-host
 	 * Create a new redirection-host
 	 */
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body)
-			.then((payload) => {
-				return internalRedirectionHost.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body);
+			const result = await internalRedirectionHost.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/redirection-hosts/123
  * /api/nginx/redirection-hosts/123
  */
  */
 router
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -85,33 +87,35 @@ router
 	 *
 	 *
 	 * Retrieve a specific redirection-host
 	 * Retrieve a specific redirection-host
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["host_id"],
+					additionalProperties: false,
+					properties: {
+						host_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					host_id: req.params.host_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
 				},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalRedirectionHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+			);
+			const row = await internalRedirectionHost.get(res.locals.access, {
+				id: Number.parseInt(data.host_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -119,17 +123,19 @@ router
 	 *
 	 *
 	 * Update and existing redirection-host
 	 * Update and existing redirection-host
 	 */
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
-				return internalRedirectionHost.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"),
+				req.body,
+			);
+			payload.id = Number.parseInt(req.params.host_id, 10);
+			const result = await internalRedirectionHost.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -137,13 +143,16 @@ router
 	 *
 	 *
 	 * Update and existing redirection-host
 	 * Update and existing redirection-host
 	 */
 	 */
-	.delete((req, res, next) => {
-		internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.delete(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -152,8 +161,8 @@ router
  * /api/nginx/redirection-hosts/123/enable
  * /api/nginx/redirection-hosts/123/enable
  */
  */
 router
 router
-	.route('/:host_id/enable')
-	.options((req, res) => {
+	.route("/:host_id/enable")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -161,13 +170,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/redirection-hosts/123/enable
 	 * POST /api/nginx/redirection-hosts/123/enable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -176,8 +188,8 @@ router
  * /api/nginx/redirection-hosts/123/disable
  * /api/nginx/redirection-hosts/123/disable
  */
  */
 router
 router
-	.route('/:host_id/disable')
-	.options((req, res) => {
+	.route("/:host_id/disable")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode())
 	.all(jwtdecode())
@@ -185,13 +197,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/redirection-hosts/123/disable
 	 * POST /api/nginx/redirection-hosts/123/disable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalRedirectionHost.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

-module.exports = router;
+export default router;

+ 118 - 106
backend/routes/nginx/streams.js

@@ -1,22 +1,23 @@
-const express        = require('express');
-const validator      = require('../../lib/validator');
-const jwtdecode      = require('../../lib/express/jwt-decode');
-const apiValidator   = require('../../lib/validator/api');
-const internalStream = require('../../internal/stream');
-const schema         = require('../../schema');
+import express from "express";
+import internalStream from "../../internal/stream.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { express as logger } from "../../logger.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 
-let router = express.Router({
+const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

/**
 * /api/nginx/streams
 */
router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
		res.sendStatus(204);
	})
	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -26,29 +27,31 @@ router
 	 *
 	 *
 	 * Retrieve all streams
 	 * Retrieve all streams
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
+				},
+				{
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
 				},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalStream.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+			);
+			const rows = await internalStream.getAll(res.locals.access, data.expand, data.query);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -56,16 +59,15 @@ router
 	 *
 	 *
 	 * Create a new stream
 	 * Create a new stream
 	 */
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body)
-			.then((payload) => {
-				return internalStream.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/streams", "post"), req.body);
+			const result = await internalStream.create(res.locals.access, payload);
+			res.status(201).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -74,8 +76,8 @@ router
  * /api/nginx/streams/123
  * /api/nginx/streams/123
  */
  */
 router
 router
-	.route('/:stream_id')
-	.options((req, res) => {
+	.route("/:stream_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -85,33 +87,35 @@ router
 	 *
 	 *
 	 * Retrieve a specific stream
 	 * Retrieve a specific stream
 	 */
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['stream_id'],
-			additionalProperties: false,
-			properties:           {
-				stream_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["stream_id"],
+					additionalProperties: false,
+					properties: {
+						stream_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
+				},
+				{
+					stream_id: req.params.stream_id,
+					expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
 				},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			stream_id: req.params.stream_id,
-			expand:    (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalStream.get(res.locals.access, {
-					id:     parseInt(data.stream_id, 10),
-					expand: data.expand
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+			);
+			const row = await internalStream.get(res.locals.access, {
+				id: Number.parseInt(data.stream_id, 10),
+				expand: data.expand,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -119,17 +123,16 @@ router
 	 *
 	 *
 	 * Update and existing stream
 	 * Update and existing stream
 	 */
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = parseInt(req.params.stream_id, 10);
-				return internalStream.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body);
+			payload.id = Number.parseInt(req.params.stream_id, 10);
+			const result = await internalStream.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 	})
 
 
 	/**
 	/**
@@ -137,13 +140,16 @@ router
 	 *
 	 *
 	 * Update and existing stream
 	 * Update and existing stream
 	 */
 	 */
-	.delete((req, res, next) => {
-		internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalStream.delete(res.locals.access, {
+				id: Number.parseInt(req.params.stream_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -152,7 +158,7 @@ router
  * /api/nginx/streams/123/enable
  * /api/nginx/streams/123/enable
  */
  */
 router
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -161,13 +167,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/streams/123/enable
 	 * POST /api/nginx/streams/123/enable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalStream.enable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 	});
 
 
 /**
 /**
@@ -176,7 +185,7 @@ router
  * /api/nginx/streams/123/disable
  * /api/nginx/streams/123/disable
  */
  */
 router
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 	.options((_, res) => {
 		res.sendStatus(204);
 		res.sendStatus(204);
 	})
 	})
@@ -185,13 +194,16 @@ router
 	/**
 	/**
 	 * POST /api/nginx/streams/123/disable
 	 * POST /api/nginx/streams/123/disable
 	 */
 	 */
-	.post((req, res, next) => {
-		internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalStream.disable(res.locals.access, {
+				id: Number.parseInt(req.params.host_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

-module.exports = router;
+export default router;

+ 18 - 15
backend/routes/reports.js

@@ -1,29 +1,32 @@
-const express        = require('express');
-const jwtdecode      = require('../lib/express/jwt-decode');
-const internalReport = require('../internal/report');
+import express from "express";
+import internalReport from "../internal/report.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import { express as logger } from "../logger.js";
 
 
-let router = express.Router({
+const router = express.Router({
	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
});

router
-	.route('/hosts')
+	.route("/hosts")
	.options((_, res) => {
		res.sendStatus(204);
	})
+	.all(jwtdecode())

	/**
	 * GET /reports/hosts
	 */
-	.get(jwtdecode(), (_, res, next) => {
-		internalReport.getHostsReport(res.locals.access)
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const data = await internalReport.getHostsReport(res.locals.access);
+			res.status(200).send(data);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
	});

-module.exports = router;
+export default router;
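
JWT decoding for this route moved from a per-handler middleware argument into .all(), so it now runs for every verb registered after it, while the OPTIONS preflight registered before it still short-circuits. A small sketch of the before/after wiring, with preflight and handler standing in for the real callbacks:

// Before: the decoder ran only for GET requests on /hosts.
router.route("/hosts").options(preflight).get(jwtdecode(), handler);

// After: it runs for every verb matched after .all(); an OPTIONS request still
// ends at the preflight handler registered ahead of it.
router.route("/hosts").options(preflight).all(jwtdecode()).get(handler);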

+ 25 - 19
backend/routes/schema.js

@@ -1,15 +1,16 @@
-const express = require('express');
-const schema  = require('../schema');
-const PACKAGE = require('../package.json');
+import express from "express";
+import { express as logger } from "../logger.js";
+import PACKAGE from "../package.json" with { type: "json" };
+import { getCompiledSchema } from "../schema/index.js";
 
 const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -18,21 +19,26 @@ router
 	 * GET /schema
 	 */
-	.get(async (req, res) => {
+	.get(async (req, res, next) => {
-		let swaggerJSON = await schema.getCompiledSchema();
+		try {
+			const swaggerJSON = await getCompiledSchema();
 
-		let proto = req.protocol;
-		if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
-			proto = req.headers['x-forwarded-proto'];
-		}
+			let proto = req.protocol;
+			if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
+				proto = req.headers["x-forwarded-proto"];
+			}
 
-		let origin = proto + '://' + req.hostname;
-		if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
-			origin = req.headers.origin;
-		}
+			let origin = `${proto}://${req.hostname}`;
+			if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
+				origin = req.headers.origin;
+			}
 
-		swaggerJSON.info.version   = PACKAGE.version;
-		swaggerJSON.servers[0].url = origin + '/api';
-		res.status(200).send(swaggerJSON);
+			swaggerJSON.info.version = PACKAGE.version;
+			swaggerJSON.servers[0].url = `${origin}/api`;
+			res.status(200).send(swaggerJSON);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
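
The /schema endpoint now rebuilds the advertised server URL from the request, preferring X-Forwarded-Proto and the Origin header when present. A minimal client-side sketch follows, assuming the backend is reachable on http://localhost:81 and that the router is mounted under /api (both assumptions, not taken from this diff).

// Fetch the dereferenced OpenAPI document and inspect the rewritten fields.
const response = await fetch("http://localhost:81/api/schema", {
	headers: { "X-Forwarded-Proto": "https" },
});
const swagger = await response.json();
console.log(swagger.info.version);   // mirrors backend/package.json version
console.log(swagger.servers[0].url); // derived from the request origin, ending in "/api"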

+ 56 - 53
backend/routes/settings.js

@@ -1,21 +1,22 @@
-const express         = require('express');
-const validator       = require('../lib/validator');
-const jwtdecode       = require('../lib/express/jwt-decode');
-const apiValidator    = require('../lib/validator/api');
-const internalSetting = require('../internal/setting');
-const schema          = require('../schema');
+import express from "express";
+import internalSetting from "../internal/setting.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/settings
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -26,13 +27,14 @@ router
 	 *
 	 * Retrieve all settings
 	 */
-	.get((_, res, next) => {
-		internalSetting.getAll(res.locals.access)
-			.then((rows) => {
-				res.status(200)
-					.send(rows);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const rows = await internalSetting.getAll(res.locals.access);
+			res.status(200).send(rows);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -41,7 +43,7 @@ router
  * /api/settings/something
  */
 router
-	.route('/:setting_id')
+	.route("/:setting_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -52,29 +54,31 @@ router
 	 *
 	 * Retrieve a specific setting
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['setting_id'],
-			additionalProperties: false,
-			properties:           {
-				setting_id: {
-					type:      'string',
-					minLength: 1
-				}
-			}
-		}, {
-			setting_id: req.params.setting_id
-		})
-			.then((data) => {
-				return internalSetting.get(res.locals.access, {
-					id: data.setting_id
-				});
-			})
-			.then((row) => {
-				res.status(200)
-					.send(row);
-			})
-			.catch(next);
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["setting_id"],
+					additionalProperties: false,
+					properties: {
+						setting_id: {
+							type: "string",
+							minLength: 1,
+						},
+					},
+				},
+				{
+					setting_id: req.params.setting_id,
+				},
+			);
+			const row = await internalSetting.get(res.locals.access, {
+				id: data.setting_id,
+			});
+			res.status(200).send(row);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -82,17 +86,16 @@ router
 	 *
 	 * Update an existing setting
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.setting_id;
-				return internalSetting.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body);
+			payload.id = req.params.setting_id;
+			const result = await internalSetting.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
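
Both settings routes validate their inputs before touching internalSetting: the GET checks the setting_id path parameter and the PUT runs the body through the swagger schema. A rough usage sketch, assuming a valid JWT in `token`, a setting id of "default-site", and the value/meta field names (all assumptions, not taken from this diff):

const headers = { Authorization: `Bearer ${token}`, "Content-Type": "application/json" };

// List all settings, then read one by its string id.
const all = await (await fetch("/api/settings", { headers })).json();
console.log(all.length);
const one = await (await fetch("/api/settings/default-site", { headers })).json();

// Update it; the body is checked against the "/settings/{settingID}" PUT schema first.
await fetch("/api/settings/default-site", {
	method: "PUT",
	headers,
	body: JSON.stringify({ value: one.value, meta: one.meta }),
});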

+ 30 - 27
backend/routes/tokens.js

@@ -1,17 +1,18 @@
-const express       = require('express');
-const jwtdecode     = require('../lib/express/jwt-decode');
-const apiValidator  = require('../lib/validator/api');
-const internalToken = require('../internal/token');
-const schema        = require('../schema');
+import express from "express";
+import internalToken from "../internal/token.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -23,16 +24,17 @@ router
 	 * We also piggy back on to this method, allowing admins to get tokens
 	 * for services like Job board and Worker.
 	 */
-	.get(jwtdecode(), (req, res, next) => {
-		internalToken.getFreshToken(res.locals.access, {
-			expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
-			scope:  (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
-		})
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+	.get(jwtdecode(), async (req, res, next) => {
+		try {
+			const data = await internalToken.getFreshToken(res.locals.access, {
+				expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
+				scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
+			});
+			res.status(200).send(data);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -41,13 +43,14 @@ router
 	 * Create a new Token
 	 */
 	.post(async (req, res, next) => {
-		apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body)
-			.then(internalToken.getTokenFromEmail)
-			.then((data) => {
-				res.status(200)
-					.send(data);
-			})
-			.catch(next);
+		try {
+			const data = await apiValidator(getValidationSchema("/tokens", "post"), req.body);
+			const result = await internalToken.getTokenFromEmail(data);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
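
The GET route refreshes an existing token (optionally overriding expiry and scope via query parameters), while the POST route exchanges credentials for a new one. A rough sketch; the identity/secret field names and the "30d" expiry format are assumptions, not taken from this diff:

// Exchange credentials for a token.
const login = await fetch("/api/tokens", {
	method: "POST",
	headers: { "Content-Type": "application/json" },
	body: JSON.stringify({ identity: "admin@example.com", secret: "changeme" }),
});
const { token } = await login.json();

// Refresh it; expiry and scope are optional query parameters.
const refreshed = await fetch("/api/tokens?expiry=30d", {
	headers: { Authorization: `Bearer ${token}` },
});
console.log((await refreshed.json()).token);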

+ 212 - 131
backend/routes/users.js

@@ -1,22 +1,27 @@
-const express      = require('express');
-const validator    = require('../lib/validator');
-const jwtdecode    = require('../lib/express/jwt-decode');
-const userIdFromMe = require('../lib/express/user-id-from-me');
-const internalUser = require('../internal/user');
-const apiValidator = require('../lib/validator/api');
-const schema       = require('../schema');
-
-let router = express.Router({
+import express from "express";
+import internalUser from "../internal/user.js";
+import Access from "../lib/access.js";
+import { isCI } from "../lib/config.js";
+import errs from "../lib/error.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import userIdFromMe from "../lib/express/user-id-from-me.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { express as logger } from "../logger.js";
+import { getValidationSchema } from "../schema/index.js";
+import { isSetup } from "../setup.js";
+
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/users
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -27,33 +32,38 @@ router
 	 *
 	 * Retrieve all users
 	 */
-	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					additionalProperties: false,
+					properties: {
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+						query: {
+							$ref: "common#/properties/query",
+						},
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
-			.then((data) => {
-				return internalUser.getAll(res.locals.access, data.expand, data.query);
-			})
-			.then((users) => {
-				res.status(200)
-					.send(users);
-			})
-			.catch((err) => {
-				console.log(err);
-				next(err);
-			});
-		//.catch(next);
+				{
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+					query: typeof req.query.query === "string" ? req.query.query : null,
+				},
+			);
+			const users = await internalUser.getAll(
+				res.locals.access,
+				data.expand,
+				data.query,
+			);
+			res.status(200).send(users);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -61,16 +71,66 @@ router
 	 *
 	 * Create a new User
 	 */
-	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users', 'post'), req.body)
-			.then((payload) => {
-				return internalUser.create(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(201)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		const body = req.body;
+
+		try {
+			// If we are in setup mode, we don't check access for current user
+			const setup = await isSetup();
+			if (!setup) {
+				logger.info("Creating a new user in setup mode");
+				const access = new Access(null);
+				await access.load(true);
+				res.locals.access = access;
+
+				// We are in setup mode, set some defaults for this first new user, such as making
+				// them an admin.
+				body.is_disabled = false;
+				if (typeof body.roles !== "object" || body.roles === null) {
+					body.roles = [];
+				}
+				if (body.roles.indexOf("admin") === -1) {
+					body.roles.push("admin");
+				}
+			}
+
+			const payload = await apiValidator(
+				getValidationSchema("/users", "post"),
+				body,
+			);
+			const user = await internalUser.create(res.locals.access, payload);
+			res.status(201).send(user);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
+	})
+
+	/**
+	 * DELETE /api/users
+	 *
+	 * Deletes ALL users. This is NOT GENERALLY AVAILABLE!
+	 * (!) It is NOT an authenticated endpoint.
+	 * (!) Only CI should be able to call this endpoint.
+	 * As a result, it will only work when the env vars
+	 * DEBUG=true and CI=true are both set.
+	 *
+	 * Do NOT set those env vars in a production environment!
+	 */
+	.delete(async (req, res, next) => {
+		if (isCI()) {
+			try {
+				logger.warn("Deleting all users - CI environment detected, allowing this operation");
+				await internalUser.deleteAll();
+				res.status(200).send(true);
+			} catch (err) {
+				logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+				next(err);
+			}
+			return;
+		}
+
+		next(new errs.ItemNotFoundError());
 	});
 
 /**
@@ -79,7 +139,7 @@ router
  * /api/users/123
  */
 router
-	.route('/:user_id')
+	.route("/:user_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -91,37 +151,43 @@ router
 	 *
 	 * Retrieve a specific user
 	 */
-	.get((req, res, next) => {
-		validator({
-			required:             ['user_id'],
-			additionalProperties: false,
-			properties:           {
-				user_id: {
-					$ref: 'common#/properties/id'
+	.get(async (req, res, next) => {
+		try {
+			const data = await validator(
+				{
+					required: ["user_id"],
+					additionalProperties: false,
+					properties: {
+						user_id: {
+							$ref: "common#/properties/id",
+						},
+						expand: {
+							$ref: "common#/properties/expand",
+						},
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			user_id: req.params.user_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
-			.then((data) => {
-				return internalUser.get(res.locals.access, {
-					id:     data.user_id,
-					expand: data.expand,
-					omit:   internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
-				});
-			})
-			.then((user) => {
-				res.status(200)
-					.send(user);
-			})
-			.catch((err) => {
-				console.log(err);
-				next(err);
+				{
+					user_id: req.params.user_id,
+					expand:
+						typeof req.query.expand === "string"
+							? req.query.expand.split(",")
+							: null,
+				},
+			);
+
+			const user = await internalUser.get(res.locals.access, {
+				id: data.user_id,
+				expand: data.expand,
+				omit: internalUser.getUserOmisionsByAccess(
+					res.locals.access,
+					data.user_id,
+				),
 			});
+			res.status(200).send(user);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -129,17 +195,19 @@ router
 	 *
 	 * Update an existing user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.update(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.update(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	})
 
 	/**
@@ -147,13 +215,16 @@ router
 	 *
 	 * Delete a user
 	 */
-	.delete((req, res, next) => {
-		internalUser.delete(res.locals.access, {id: req.params.user_id})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.delete(async (req, res, next) => {
+		try {
+			const result = await internalUser.delete(res.locals.access, {
+				id: req.params.user_id,
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -162,8 +233,8 @@ router
  * /api/users/123/auth
  */
 router
-	.route('/:user_id/auth')
-	.options((req, res) => {
+	.route("/:user_id/auth")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -174,17 +245,19 @@ router
 	 *
 	 * Update password for a user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.setPassword(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}/auth", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.setPassword(res.locals.access, payload);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -193,8 +266,8 @@ router
  * /api/users/123/permissions
  */
 router
-	.route('/:user_id/permissions')
-	.options((req, res) => {
+	.route("/:user_id/permissions")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -205,17 +278,22 @@ router
 	 *
 	 * Set some or all permissions for a user
 	 */
-	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body)
-			.then((payload) => {
-				payload.id = req.params.user_id;
-				return internalUser.setPermissions(res.locals.access, payload);
-			})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.put(async (req, res, next) => {
+		try {
+			const payload = await apiValidator(
+				getValidationSchema("/users/{userID}/permissions", "put"),
+				req.body,
+			);
+			payload.id = req.params.user_id;
+			const result = await internalUser.setPermissions(
+				res.locals.access,
+				payload,
+			);
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
 /**
@@ -224,7 +302,7 @@ router
  * /api/users/123/login
  */
 router
-	.route('/:user_id/login')
+	.route("/:user_id/login")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -235,13 +313,16 @@ router
 	 *
 	 * Log in as a user
 	 */
-	.post((req, res, next) => {
-		internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
-			.then((result) => {
-				res.status(200)
-					.send(result);
-			})
-			.catch(next);
+	.post(async (req, res, next) => {
+		try {
+			const result = await internalUser.loginAs(res.locals.access, {
+				id: Number.parseInt(req.params.user_id, 10),
+			});
+			res.status(200).send(result);
+		} catch (err) {
+			logger.debug(`${req.method.toUpperCase()} ${req.path}: ${err}`);
+			next(err);
+		}
 	});
 
-module.exports = router;
+export default router;
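
Two behaviours above are easy to miss: while isSetup() is still false, POST /api/users runs with a synthetic full-access Access object and forces the first account into the admin role, and DELETE /api/users is only honoured when isCI() is true (otherwise it falls through to an ItemNotFoundError). A rough first-boot sketch; the field values are assumptions, and `token` is assumed to come from a later /api/tokens call:

// Create the first user; no Authorization header is needed before setup completes.
const created = await fetch("/api/users", {
	method: "POST",
	headers: { "Content-Type": "application/json" },
	body: JSON.stringify({
		name: "Jamie",
		nickname: "Jamie",
		email: "jamie@example.com",
		roles: [], // "admin" is appended automatically while in setup mode
	}),
});
const user = await created.json();

// Passwords live behind the auth sub-route; the body shape here is an assumption.
await fetch(`/api/users/${user.id}/auth`, {
	method: "PUT",
	headers: {
		"Content-Type": "application/json",
		Authorization: `Bearer ${token}`, // assumed to be obtained from POST /api/tokens
	},
	body: JSON.stringify({ type: "password", secret: "changeme1234" }),
});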

+ 7 - 0
backend/schema/components/audit-log-list.json

@@ -0,0 +1,7 @@
+{
+	"type": "array",
+	"description": "Audit Log list",
+	"items": {
+		"$ref": "./audit-log-object.json"
+	}
+}

+ 13 - 1
backend/schema/components/audit-log-object.json

@@ -1,7 +1,16 @@
 {
 	"type": "object",
 	"description": "Audit Log object",
-	"required": ["id", "created_on", "modified_on", "user_id", "object_type", "object_id", "action", "meta"],
+	"required": [
+		"id",
+		"created_on",
+		"modified_on",
+		"user_id",
+		"object_type",
+		"object_id",
+		"action",
+		"meta"
+	],
 	"additionalProperties": false,
 	"properties": {
 		"id": {
@@ -27,6 +36,9 @@
 		},
 		"meta": {
 			"type": "object"
+		},
+		"user": {
+			"$ref": "./user-object.json"
 		}
 	}
 }

+ 0 - 6
backend/schema/components/certificate-object.json

@@ -62,15 +62,9 @@
 				"dns_provider_credentials": {
 					"type": "string"
 				},
-				"letsencrypt_agree": {
-					"type": "boolean"
-				},
 				"letsencrypt_certificate": {
 					"type": "object"
 				},
-				"letsencrypt_email": {
-					"$ref": "../common.json#/properties/email"
-				},
 				"propagation_seconds": {
 					"type": "integer",
 					"minimum": 0

+ 23 - 0
backend/schema/components/dns-providers-list.json

@@ -0,0 +1,23 @@
+{
+	"type": "array",
+	"description": "DNS Providers list",
+	"items": {
+		"type": "object",
+		"required": ["id", "name", "credentials"],
+		"additionalProperties": false,
+		"properties": {
+			"id": {
+				"type": "string",
+				"description": "Unique identifier for the DNS provider, matching the python package"
+			},
+			"name": {
+				"type": "string",
+				"description": "Human-readable name of the DNS provider"
+			},
+			"credentials": {
+				"type": "string",
+				"description": "Instructions on how to format the credentials for this DNS provider"
+			}
+		}
+	}
+}

+ 5 - 0
backend/schema/components/health-object.json

@@ -9,6 +9,11 @@
 			"description": "Healthy",
 			"example": "OK"
 		},
+		"setup": {
+			"type": "boolean",
+			"description": "Whether the initial setup has been completed",
+			"example": true
+		},
 		"version": {
 			"type": "object",
 			"description": "The version object",

+ 1 - 1
backend/schema/components/stream-object.json

@@ -31,7 +31,7 @@
 				},
 				{
 					"type": "string",
-					"format": "ipv4"
+					"format": "^[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$"
 				},
 				{
 					"type": "string",

+ 57 - 0
backend/schema/components/user-object.json

@@ -54,6 +54,63 @@
 			"items": {
 				"type": "string"
 			}
+		},
+		"permissions": {
+			"type": "object",
+			"description": "Permissions if expanded in request",
+			"required": [
+				"visibility",
+				"proxy_hosts",
+				"redirection_hosts",
+				"dead_hosts",
+				"streams",
+				"access_lists",
+				"certificates"
+			],
+			"properties": {
+				"visibility": {
+					"type": "string",
+					"description": "Visibility level",
+					"example": "all",
+					"pattern": "^(all|user)$"
+				},
+				"proxy_hosts": {
+					"type": "string",
+					"description": "Proxy Hosts access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"redirection_hosts": {
+					"type": "string",
+					"description": "Redirection Hosts access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"dead_hosts": {
+					"type": "string",
+					"description": "Dead Hosts access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"streams": {
+					"type": "string",
+					"description": "Streams access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"access_lists": {
+					"type": "string",
+					"description": "Access Lists access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				},
+				"certificates": {
+					"type": "string",
+					"description": "Certificates access level",
+					"example": "all",
+					"pattern": "^(manage|view|hidden)$"
+				}
+			}
 		}
 	}
 }

+ 40 - 35
backend/schema/index.js

@@ -1,41 +1,46 @@
-const refParser = require('@apidevtools/json-schema-ref-parser');
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import $RefParser from "@apidevtools/json-schema-ref-parser";
 
-let compiledSchema = null;
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
-module.exports = {
+let compiledSchema = null;
 
-	/**
-	 * Compiles the schema, by dereferencing it, only once
-	 * and returns the memory cached value
-	 */
-	getCompiledSchema: async () => {
-		if (compiledSchema === null) {
-			compiledSchema = await refParser.dereference(__dirname + '/swagger.json', {
-				mutateInputSchema: false,
-			});
-		}
-		return compiledSchema;
-	},
+/**
+ * Compiles the schema, by dereferencing it, only once
+ * and returns the memory cached value
+ */
+const getCompiledSchema = async () => {
+	if (compiledSchema === null) {
+		compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
+			mutateInputSchema: false,
+		});
+	}
+	return compiledSchema;
+};
 
-	/**
-	 * Scans the schema for the validation schema for the given path and method
-	 * and returns it.
-	 *
-	 * @param {string} path
-	 * @param {string} method
-	 * @returns string|null
-	 */
-	getValidationSchema: (path, method) => {
-		if (compiledSchema !== null &&
-			typeof compiledSchema.paths[path] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined'
-		) {
-			return compiledSchema.paths[path][method].requestBody.content['application/json'].schema;
-		}
-		return null;
+/**
+ * Scans the schema for the validation schema for the given path and method
+ * and returns it.
+ *
+ * @param {string} path
+ * @param {string} method
+ * @returns string|null
+ */
+const getValidationSchema = (path, method) => {
+	if (
+		compiledSchema !== null &&
+		typeof compiledSchema.paths[path] !== "undefined" &&
+		typeof compiledSchema.paths[path][method] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
+	) {
+		return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
 	}
+	return null;
 };
+
+export { getCompiledSchema, getValidationSchema };
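
The module now exposes two named exports instead of an object: getCompiledSchema() dereferences swagger.json once and caches the result, and getValidationSchema() only returns a schema after that cache is warm (it returns null otherwise). A short sketch of how the route files in this diff consume them; the import paths assume the snippet lives under backend/, and the request body fields are illustrative assumptions:

import apiValidator from "./lib/validator/api.js";
import { getCompiledSchema, getValidationSchema } from "./schema/index.js";

// Warm the cache first; getValidationSchema() returns null until this has run.
await getCompiledSchema();

const schema = getValidationSchema("/tokens", "post");
const payload = await apiValidator(schema, {
	identity: "admin@example.com", // example body; field names are assumptions
	secret: "changeme",
});
console.log(payload);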

+ 3 - 3
backend/schema/paths/audit-log/get.json

@@ -1,6 +1,6 @@
 {
-	"operationId": "getAuditLog",
-	"summary": "Get Audit Log",
+	"operationId": "getAuditLogs",
+	"summary": "Get Audit Logs",
 	"tags": ["Audit Log"],
 	"security": [
 		{
@@ -44,7 +44,7 @@
 						}
 					},
 					"schema": {
-						"$ref": "../../components/audit-log-object.json"
+						"$ref": "../../components/audit-log-list.json"
 					}
 				}
 			}

+ 73 - 0
backend/schema/paths/audit-log/id/get.json

@@ -0,0 +1,73 @@
+{
+	"operationId": "getAuditLog",
+	"summary": "Get Audit Log Event",
+	"tags": [
+		"Audit Log"
+	],
+	"security": [
+		{
+			"BearerAuth": [
+				"audit-log"
+			]
+		}
+	],
+	"parameters": [
+		{
+			"in": "path",
+			"name": "id",
+			"schema": {
+				"type": "integer",
+				"minimum": 1
+			},
+			"required": true,
+			"example": 1
+		}
+	],
+	"responses": {
+		"200": {
+			"description": "200 response",
+			"content": {
+				"application/json": {
+					"examples": {
+						"default": {
+							"value": {
+								"id": 1,
+								"created_on": "2025-09-15T17:27:45.000Z",
+								"modified_on": "2025-09-15T17:27:45.000Z",
+								"user_id": 1,
+								"object_type": "user",
+								"object_id": 1,
+								"action": "created",
+								"meta": {
+									"id": 1,
+									"created_on": "2025-09-15T17:27:45.000Z",
+									"modified_on": "2025-09-15T17:27:45.000Z",
+									"is_disabled": false,
+									"email": "[email protected]",
+									"name": "Jamie",
+									"nickname": "Jamie",
+									"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
+									"roles": [
+										"admin"
+									],
+									"permissions": {
+										"visibility": "all",
+										"proxy_hosts": "manage",
+										"redirection_hosts": "manage",
+										"dead_hosts": "manage",
+										"streams": "manage",
+										"access_lists": "manage",
+										"certificates": "manage"
+									}
+								}
+							}
+						}
+					},
+					"schema": {
+						"$ref": "../../../components/audit-log-object.json"
+					}
+				}
+			}
+		}
+	}
+}

+ 1 - 0
backend/schema/paths/get.json

@@ -11,6 +11,7 @@
 						"default": {
 							"value": {
 								"status": "OK",
+								"setup": true,
 								"version": {
 									"major": 2,
 									"minor": 1,

Some files are not shown in this diff comparison because too many files have changed.