
Convert backend to ESM

- About 5 years overdue
- Remove eslint, use biomejs instead
Jamie Curnow, 3 months ago
parent commit 330993f028
89 files changed with 4593 additions and 4932 deletions
  1. + 0 - 73   backend/.eslintrc.json
  2. + 0 - 11   backend/.prettierrc
  3. + 45 - 43   backend/app.js
  4. + 91 - 0   backend/biome.json
  5. + 20 - 15   backend/db.js
  6. + 26 - 28   backend/index.js
  7. + 226 - 213   backend/internal/access-list.js
  8. + 35 - 36   backend/internal/audit-log.js
  9. + 336 - 322   backend/internal/certificate.js
  10. + 186 - 180   backend/internal/dead-host.js
  11. + 130 - 115   backend/internal/host.js
  12. + 65 - 56   backend/internal/ip_ranges.js
  13. + 96 - 103   backend/internal/nginx.js
  14. + 191 - 183   backend/internal/proxy-host.js
  15. + 195 - 182   backend/internal/redirection-host.js
  16. + 17 - 18   backend/internal/report.js
  17. + 35 - 43   backend/internal/setting.js
  18. + 156 - 153   backend/internal/stream.js
  19. + 88 - 95   backend/internal/token.js
  20. + 197 - 210   backend/internal/user.js
  21. + 164 - 167   backend/lib/access.js
  22. + 76 - 74   backend/lib/certbot.js
  23. + 144 - 144   backend/lib/config.js
  24. + 49 - 48   backend/lib/error.js
  25. + 8 - 7   backend/lib/express/cors.js
  26. + 6 - 6   backend/lib/express/jwt-decode.js
  27. + 5 - 5   backend/lib/express/jwt.js
  28. + 16 - 16   backend/lib/express/pagination.js
  29. + 2 - 3   backend/lib/express/user-id-from-me.js
  30. + 52 - 56   backend/lib/helpers.js
  31. + 25 - 21   backend/lib/migrate_template.js
  32. + 92 - 92   backend/lib/utils.js
  33. + 16 - 16   backend/lib/validator/api.js
  34. + 18 - 18   backend/lib/validator/index.js
  35. + 16 - 12   backend/logger.js
  36. + 11 - 13   backend/migrate.js
  37. + 134 - 133   backend/migrations/20180618015850_initial.js
  38. + 15 - 14   backend/migrations/20180929054513_websockets.js
  39. + 15 - 13   backend/migrations/20181019052346_forward_host.js
  40. + 19 - 18   backend/migrations/20181113041458_http2_support.js
  41. + 14 - 12   backend/migrations/20181213013211_forward_scheme.js
  42. + 23 - 21   backend/migrations/20190104035154_disabled.js
  43. + 14 - 12   backend/migrations/20190215115310_customlocations.js
  44. + 23 - 21   backend/migrations/20190218060101_hsts.js
  45. + 10 - 9   backend/migrations/20190227065017_settings.js
  46. + 27 - 28   backend/migrations/20200410143839_access_list_client.js
  47. + 14 - 12   backend/migrations/20200410143840_access_list_client_fix.js
  48. + 19 - 17   backend/migrations/20201014143841_pass_auth.js
  49. + 21 - 19   backend/migrations/20210210154702_redirection_scheme.js
  50. + 21 - 19   backend/migrations/20210210154703_redirection_status_code.js
  51. + 33 - 30   backend/migrations/20210423103500_stream_domain.js
  52. + 27 - 25   backend/migrations/20211108145214_regenerate_default_host.js
  53. + 21 - 16   backend/migrations/20240427161436_stream_ssl.js
  54. + 51 - 56   backend/models/access_list.js
  55. + 25 - 24   backend/models/access_list_auth.js
  56. + 25 - 24   backend/models/access_list_client.js
  57. + 22 - 22   backend/models/audit-log.js
  58. + 36 - 42   backend/models/auth.js
  59. + 61 - 64   backend/models/certificate.js
  60. + 40 - 47   backend/models/dead_host.js
  61. + 6 - 7   backend/models/now_helper.js
  62. + 56 - 56   backend/models/proxy_host.js
  63. + 47 - 48   backend/models/redirection_host.js
  64. + 3 - 3   backend/models/setting.js
  65. + 38 - 43   backend/models/stream.js
  66. + 52 - 51   backend/models/token.js
  67. + 28 - 32   backend/models/user.js
  68. + 4 - 4   backend/models/user_permission.js
  69. + 14 - 13   backend/package.json
  70. + 26 - 24   backend/routes/audit-log.js
  71. + 41 - 29   backend/routes/main.js
  72. + 58 - 56   backend/routes/nginx/access_lists.js
  73. + 86 - 85   backend/routes/nginx/certificates.js
  74. + 66 - 64   backend/routes/nginx/dead_hosts.js
  75. + 67 - 65   backend/routes/nginx/proxy_hosts.js
  76. + 69 - 67   backend/routes/nginx/redirection_hosts.js
  77. + 67 - 65   backend/routes/nginx/streams.js
  78. + 11 - 11   backend/routes/reports.js
  79. + 14 - 14   backend/routes/schema.js
  80. + 34 - 33   backend/routes/settings.js
  81. + 18 - 19   backend/routes/tokens.js
  82. + 69 - 69   backend/routes/users.js
  83. + 40 - 35   backend/schema/index.js
  84. + 5 - 5   backend/scripts/install-certbot-plugins
  85. + 62 - 66   backend/setup.js
  86. + 11 - 8   backend/validate-schema.js
  87. + 54 - 483   backend/yarn.lock
  88. + 1 - 1   scripts/ci/frontend-build
  89. + 1 - 1   scripts/ci/test-and-build
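
The diffs below all apply the same mechanical conversion: require() calls become import statements, module.exports becomes export default (or named exports), and relative imports gain an explicit .js extension. A minimal before/after sketch of that pattern, using a hypothetical widget.js module rather than a file from this commit:

// Before: CommonJS
// const fs   = require('node:fs');
// const util = require('./lib/util');
// module.exports = { loadWidget };

// After: ESM. Note the explicit ".js" extension on the relative import path.
import fs from "node:fs";
import util from "./lib/util.js"; // hypothetical helper, shown only for the import style

const loadWidget = (path) => util.parse(fs.readFileSync(path, "utf8"));

export default { loadWidget };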

+ 0 - 73
backend/.eslintrc.json

@@ -1,73 +0,0 @@
-{
-	"env": {
-		"node": true,
-		"es6": true
-	},
-	"extends": [
-		"eslint:recommended"
-	],
-	"globals": {
-		"Atomics": "readonly",
-		"SharedArrayBuffer": "readonly"
-	},
-	"parserOptions": {
-		"ecmaVersion": 2018,
-		"sourceType": "module"
-	},
-	"plugins": [
-		"align-assignments"
-	],
-	"rules": {
-		"arrow-parens": [
-			"error",
-			"always"
-		],
-		"indent": [
-			"error",
-			"tab"
-		],
-		"linebreak-style": [
-			"error",
-			"unix"
-		],
-		"quotes": [
-			"error",
-			"single"
-		],
-		"semi": [
-			"error",
-			"always"
-		],
-		"key-spacing": [
-			"error",
-			{
-				"align": "value"
-			}
-		],
-		"comma-spacing": [
-			"error",
-			{
-				"before": false,
-				"after": true
-			}
-		],
-		"func-call-spacing": [
-			"error",
-			"never"
-		],
-		"keyword-spacing": [
-			"error",
-			{
-				"before": true
-			}
-		],
-		"no-irregular-whitespace": "error",
-		"no-unused-expressions": 0,
-		"align-assignments/align-assignments": [
-			2,
-			{
-				"requiresOnly": false
-			}
-		]
-	}
-}

+ 0 - 11
backend/.prettierrc

@@ -1,11 +0,0 @@
-{
-	"printWidth": 320,
-	"tabWidth": 4,
-	"useTabs": true,
-	"semi": true,
-	"singleQuote": true,
-	"bracketSpacing": true,
-	"jsxBracketSameLine": true,
-	"trailingComma": "all",
-	"proseWrap": "always"
-}

+ 45 - 43
backend/app.js

@@ -1,9 +1,12 @@
-const express     = require('express');
-const bodyParser  = require('body-parser');
-const fileUpload  = require('express-fileupload');
-const compression = require('compression');
-const config      = require('./lib/config');
-const log         = require('./logger').express;
+import bodyParser from "body-parser";
+import compression from "compression";
+import express from "express";
+import fileUpload from "express-fileupload";
+import { isDebugMode } from "./lib/config.js";
+import cors from "./lib/express/cors.js";
+import jwt from "./lib/express/jwt.js";
+import { express as logger } from "./logger.js";
+import mainRoutes from "./routes/main.js";
 
 /**
  * App
@@ -11,7 +14,7 @@ const log         = require('./logger').express;
 const app = express();
 app.use(fileUpload());
 app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({extended: true}));
+app.use(bodyParser.urlencoded({ extended: true }));
 
 // Gzip
 app.use(compression());
@@ -20,71 +23,70 @@ app.use(compression());
  * General Logging, BEFORE routes
  */
 
-app.disable('x-powered-by');
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
-app.enable('strict routing');
+app.disable("x-powered-by");
+app.enable("trust proxy", ["loopback", "linklocal", "uniquelocal"]);
+app.enable("strict routing");
 
 // pretty print JSON when not live
-if (config.debug()) {
-	app.set('json spaces', 2);
+if (isDebugMode()) {
+	app.set("json spaces", 2);
 }
 
 // CORS for everything
-app.use(require('./lib/express/cors'));
+app.use(cors);
 
 // General security/cache related headers + server header
-app.use(function (req, res, next) {
-	let x_frame_options = 'DENY';
+app.use((_, res, next) => {
+	let x_frame_options = "DENY";
 
-	if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
+	if (typeof process.env.X_FRAME_OPTIONS !== "undefined" && process.env.X_FRAME_OPTIONS) {
 		x_frame_options = process.env.X_FRAME_OPTIONS;
 	}
 
 	res.set({
-		'X-XSS-Protection':       '1; mode=block',
-		'X-Content-Type-Options': 'nosniff',
-		'X-Frame-Options':        x_frame_options,
-		'Cache-Control':          'no-cache, no-store, max-age=0, must-revalidate',
-		Pragma:                   'no-cache',
-		Expires:                  0
+		"X-XSS-Protection": "1; mode=block",
+		"X-Content-Type-Options": "nosniff",
+		"X-Frame-Options": x_frame_options,
+		"Cache-Control": "no-cache, no-store, max-age=0, must-revalidate",
+		Pragma: "no-cache",
+		Expires: 0,
 	});
 	next();
 });
 
-app.use(require('./lib/express/jwt')());
-app.use('/', require('./routes/main'));
+app.use(jwt());
+app.use("/", mainRoutes);
 
 // production error handler
 // no stacktraces leaked to user
-// eslint-disable-next-line
-app.use(function (err, req, res, next) {
-
-	let payload = {
+app.use((err, req, res, _) => {
+	const payload = {
 		error: {
-			code:    err.status,
-			message: err.public ? err.message : 'Internal Error'
-		}
+			code: err.status,
+			message: err.public ? err.message : "Internal Error",
+		},
 	};
 
-	if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
+	if (typeof err.message_i18n !== "undefined") {
+		payload.error.message_i18n = err.message_i18n;
+	}
+
+	if (isDebugMode() || (req.baseUrl + req.path).includes("nginx/certificates")) {
 		payload.debug = {
-			stack:    typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
-			previous: err.previous
+			stack: typeof err.stack !== "undefined" && err.stack ? err.stack.split("\n") : null,
+			previous: err.previous,
 		};
 	}
 
 	// Not every error is worth logging - but this is good for now until it gets annoying.
-	if (typeof err.stack !== 'undefined' && err.stack) {
-		if (config.debug()) {
-			log.debug(err.stack);
-		} else if (typeof err.public == 'undefined' || !err.public) {
-			log.warn(err.message);
+	if (typeof err.stack !== "undefined" && err.stack) {
+		logger.debug(err.stack);
+		if (typeof err.public === "undefined" || !err.public) {
+			logger.warn(err.message);
 		}
 	}
 
-	res
-		.status(err.status || 500)
-		.send(payload);
+	res.status(err.status || 500).send(payload);
 });
 
-module.exports = app;
+export default app;

+ 91 - 0
backend/biome.json

@@ -0,0 +1,91 @@
+{
+    "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
+    "vcs": {
+        "enabled": true,
+        "clientKind": "git",
+        "useIgnoreFile": true
+    },
+    "files": {
+        "ignoreUnknown": false,
+        "includes": [
+            "**/*.ts",
+            "**/*.tsx",
+            "**/*.js",
+            "**/*.jsx",
+            "!**/dist/**/*"
+        ]
+    },
+    "formatter": {
+        "enabled": true,
+        "indentStyle": "tab",
+        "indentWidth": 4,
+        "lineWidth": 120,
+        "formatWithErrors": true
+    },
+    "assist": {
+        "actions": {
+            "source": {
+                "organizeImports": {
+                    "level": "on",
+                    "options": {
+                        "groups": [
+                            ":BUN:",
+                            ":NODE:",
+                            [
+                                "npm:*",
+                                "npm:*/**"
+                            ],
+                            ":PACKAGE_WITH_PROTOCOL:",
+                            ":URL:",
+                            ":PACKAGE:",
+                            [
+                                "/src/*",
+                                "/src/**"
+                            ],
+                            [
+                                "/**"
+                            ],
+                            [
+                                "#*",
+                                "#*/**"
+                            ],
+                            ":PATH:"
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "linter": {
+        "enabled": true,
+        "rules": {
+            "recommended": true,
+            "correctness": {
+                "useUniqueElementIds": "off"
+            },
+            "suspicious": {
+                "noExplicitAny": "off"
+            },
+            "performance": {
+                "noDelete": "off"
+            },
+            "nursery": "off",
+            "a11y": {
+                "useSemanticElements": "off",
+                "useValidAnchor": "off"
+            },
+            "style": {
+                "noParameterAssign": "error",
+                "useAsConstAssertion": "error",
+                "useDefaultParameterLast": "error",
+                "useEnumInitializers": "error",
+                "useSelfClosingElements": "error",
+                "useSingleVarDeclarator": "error",
+                "noUnusedTemplateLiteral": "error",
+                "useNumberNamespace": "error",
+                "noInferrableTypes": "error",
+                "noUselessElse": "error"
+            }
+        }
+    }
+}

+ 20 - 15
backend/db.js

@@ -1,27 +1,32 @@
-const config = require('./lib/config');
+import knex from "knex";
+import { configGet, configHas } from "./lib/config.js";
 
-if (!config.has('database')) {
-	throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
-}
+const generateDbConfig = () => {
+	if (!configHas("database")) {
+		throw new Error(
+			"Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/",
+		);
+	}
+
+	const cfg = configGet("database");
 
-function generateDbConfig() {
-	const cfg = config.get('database');
-	if (cfg.engine === 'knex-native') {
+	if (cfg.engine === "knex-native") {
 		return cfg.knex;
 	}
+
 	return {
-		client:     cfg.engine,
+		client: cfg.engine,
 		connection: {
-			host:     cfg.host,
-			user:     cfg.user,
+			host: cfg.host,
+			user: cfg.user,
 			password: cfg.password,
 			database: cfg.name,
-			port:     cfg.port
+			port: cfg.port,
 		},
 		migrations: {
-			tableName: 'migrations'
-		}
+			tableName: "migrations",
+		},
 	};
-}
+};
 
-module.exports = require('knex')(generateDbConfig());
+export default knex(generateDbConfig());
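
Since db.js now default-exports the knex instance, call sites only need to change their import line; the knex query-builder usage itself is untouched by the ESM conversion. A small usage sketch (the table and column names here are placeholders, not taken from this commit):

import db from "./db.js";

// Build a query against the default-exported knex instance.
// "user" and "is_deleted" are hypothetical names used for illustration only.
const listActiveUsers = () => db("user").where("is_deleted", 0).select("id");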

+ 26 - 28
backend/index.js

@@ -1,48 +1,47 @@
 #!/usr/bin/env node
 
-const schema = require('./schema');
-const logger = require('./logger').global;
-
-const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== 'false';
-
-async function appStart () {
-	const migrate             = require('./migrate');
-	const setup               = require('./setup');
-	const app                 = require('./app');
-	const internalCertificate = require('./internal/certificate');
-	const internalIpRanges    = require('./internal/ip_ranges');
-
-	return migrate.latest()
+import app from "./app.js";
+import internalCertificate from "./internal/certificate.js";
+import internalIpRanges from "./internal/ip_ranges.js";
+import { global as logger } from "./logger.js";
+import { migrateUp } from "./migrate.js";
+import { getCompiledSchema } from "./schema/index.js";
+import setup from "./setup.js";
+
+const IP_RANGES_FETCH_ENABLED = process.env.IP_RANGES_FETCH_ENABLED !== "false";
+
+async function appStart() {
+	return migrateUp()
 		.then(setup)
-		.then(schema.getCompiledSchema)
+		.then(getCompiledSchema)
 		.then(() => {
-			if (IP_RANGES_FETCH_ENABLED) {
-				logger.info('IP Ranges fetch is enabled');
-				return internalIpRanges.fetch().catch((err) => {
-					logger.error('IP Ranges fetch failed, continuing anyway:', err.message);
-				});
-			} else {
-				logger.info('IP Ranges fetch is disabled by environment variable');
+			if (!IP_RANGES_FETCH_ENABLED) {
+				logger.info("IP Ranges fetch is disabled by environment variable");
+				return;
 			}
+			logger.info("IP Ranges fetch is enabled");
+			return internalIpRanges.fetch().catch((err) => {
+				logger.error("IP Ranges fetch failed, continuing anyway:", err.message);
+			});
 		})
 		.then(() => {
 			internalCertificate.initTimer();
 			internalIpRanges.initTimer();
 
 			const server = app.listen(3000, () => {
-				logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
+				logger.info(`Backend PID ${process.pid} listening on port 3000 ...`);
 
-				process.on('SIGTERM', () => {
-					logger.info('PID ' + process.pid + ' received SIGTERM');
+				process.on("SIGTERM", () => {
+					logger.info(`PID ${process.pid} received SIGTERM`);
 					server.close(() => {
-						logger.info('Stopping.');
+						logger.info("Stopping.");
 						process.exit(0);
 					});
 				});
 			});
 		})
 		.catch((err) => {
-			logger.error(err.message, err);
+			logger.error(`Startup Error: ${err.message}`, err);
 			setTimeout(appStart, 1000);
 		});
 }
@@ -50,7 +49,6 @@ async function appStart () {
 try {
 	appStart();
 } catch (err) {
-	logger.error(err.message, err);
+	logger.fatal(err);
 	process.exit(1);
 }
-

+ 226 - 213
backend/internal/access-list.js

@@ -1,37 +1,37 @@
-const _                     = require('lodash');
-const fs                    = require('node:fs');
-const batchflow             = require('batchflow');
-const logger                = require('../logger').access;
-const error                 = require('../lib/error');
-const utils                 = require('../lib/utils');
-const accessListModel       = require('../models/access_list');
-const accessListAuthModel   = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel        = require('../models/proxy_host');
-const internalAuditLog      = require('./audit-log');
-const internalNginx         = require('./nginx');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import fs from "node:fs";
+import batchflow from "batchflow";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { access as logger } from "../logger.js";
+import accessListModel from "../models/access_list.js";
+import accessListAuthModel from "../models/access_list_auth.js";
+import accessListClientModel from "../models/access_list_client.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalAccessList = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		return access.can('access_lists:create', data)
+		return access
+			.can("access_lists:create", data)
 			.then((/*access_data*/) => {
 				return accessListModel
 					.query()
 					.insertAndFetch({
-						name:          data.name,
-						satisfy_any:   data.satisfy_any,
-						pass_auth:     data.pass_auth,
-						owner_user_id: access.token.getUserId(1)
+						name: data.name,
+						satisfy_any: data.satisfy_any,
+						pass_auth: data.pass_auth,
+						owner_user_id: access.token.getUserId(1),
 					})
 					.then(utils.omitRow(omissions()));
 			})
@@ -42,27 +42,27 @@ const internalAccessList = {
 
 				// Now add the items
 				data.items.map((item) => {
-					promises.push(accessListAuthModel
-						.query()
-						.insert({
+					promises.push(
+						accessListAuthModel.query().insert({
 							access_list_id: row.id,
-							username:       item.username,
-							password:       item.password
-						})
+							username: item.username,
+							password: item.password,
+						}),
 					);
+					return true;
 				});
 
 				// Now add the clients
-				if (typeof data.clients !== 'undefined' && data.clients) {
+				if (typeof data.clients !== "undefined" && data.clients) {
 					data.clients.map((client) => {
-						promises.push(accessListClientModel
-							.query()
-							.insert({
+						promises.push(
+							accessListClientModel.query().insert({
 								access_list_id: row.id,
-								address:        client.address,
-								directive:      client.directive
-							})
+								address: client.address,
+								directive: client.directive,
+							}),
 						);
+						return true;
 					});
 				}
 
@@ -70,28 +70,33 @@ const internalAccessList = {
 			})
 			.then(() => {
 				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
-				}, true /* <- skip masking */);
+				return internalAccessList.get(
+					access,
+					{
+						id: data.id,
+						expand: ["owner", "items", "clients", "proxy_hosts.access_list.[clients,items]"],
+					},
+					true /* <- skip masking */,
+				);
 			})
 			.then((row) => {
 				// Audit log
 				data.meta = _.assign({}, data.meta || {}, row.meta);
 
-				return internalAccessList.build(row)
+				return internalAccessList
+					.build(row)
 					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
+						if (Number.parseInt(row.proxy_host_count, 10)) {
+							return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
 						}
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'created',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        internalAccessList.maskItems(data)
+							action: "created",
+							object_type: "access-list",
+							object_id: row.id,
+							meta: internalAccessList.maskItems(data),
 						});
 					})
 					.then(() => {
@@ -109,124 +114,122 @@ const internalAccessList = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		return access.can('access_lists:update', data.id)
+		return access
+			.can("access_lists:update", data.id)
 			.then((/*access_data*/) => {
-				return internalAccessList.get(access, {id: data.id});
+				return internalAccessList.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (row.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError(`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`);
+					throw new errs.InternalValidationError(
+						`Access List could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+					);
 				}
 			})
 			.then(() => {
 				// patch name if specified
-				if (typeof data.name !== 'undefined' && data.name) {
-					return accessListModel
-						.query()
-						.where({id: data.id})
-						.patch({
-							name:        data.name,
-							satisfy_any: data.satisfy_any,
-							pass_auth:   data.pass_auth,
-						});
+				if (typeof data.name !== "undefined" && data.name) {
+					return accessListModel.query().where({ id: data.id }).patch({
+						name: data.name,
+						satisfy_any: data.satisfy_any,
+						pass_auth: data.pass_auth,
+					});
 				}
 			})
 			.then(() => {
 				// Check for items and add/update/remove them
-				if (typeof data.items !== 'undefined' && data.items) {
-					const promises      = [];
+				if (typeof data.items !== "undefined" && data.items) {
+					const promises = [];
 					const items_to_keep = [];
 
 					data.items.map((item) => {
 						if (item.password) {
-							promises.push(accessListAuthModel
-								.query()
-								.insert({
+							promises.push(
+								accessListAuthModel.query().insert({
 									access_list_id: data.id,
-									username:       item.username,
-									password:       item.password
-								})
+									username: item.username,
+									password: item.password,
+								}),
 							);
 						} else {
 							// This was supplied with an empty password, which means keep it but don't change the password
 							items_to_keep.push(item.username);
 						}
+						return true;
 					});
 
-					const query = accessListAuthModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
+					const query = accessListAuthModel.query().delete().where("access_list_id", data.id);
 
 					if (items_to_keep.length) {
-						query.andWhere('username', 'NOT IN', items_to_keep);
+						query.andWhere("username", "NOT IN", items_to_keep);
 					}
 
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
+					return query.then(() => {
+						// Add new items
+						if (promises.length) {
+							return Promise.all(promises);
+						}
+					});
 				}
 			})
 			.then(() => {
 				// Check for clients and add/update/remove them
-				if (typeof data.clients !== 'undefined' && data.clients) {
+				if (typeof data.clients !== "undefined" && data.clients) {
 					const promises = [];
 
 					data.clients.map((client) => {
 						if (client.address) {
-							promises.push(accessListClientModel
-								.query()
-								.insert({
+							promises.push(
+								accessListClientModel.query().insert({
 									access_list_id: data.id,
-									address:        client.address,
-									directive:      client.directive
-								})
+									address: client.address,
+									directive: client.directive,
+								}),
 							);
 						}
+						return true;
 					});
 
-					const query = accessListClientModel
-						.query()
-						.delete()
-						.where('access_list_id', data.id);
+					const query = accessListClientModel.query().delete().where("access_list_id", data.id);
 
-					return query
-						.then(() => {
-							// Add new items
-							if (promises.length) {
-								return Promise.all(promises);
-							}
-						});
+					return query.then(() => {
+						// Add new items
+						if (promises.length) {
+							return Promise.all(promises);
+						}
+					});
 				}
 			})
 			.then(() => {
 				// Add to audit log
 				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'access-list',
-					object_id:   data.id,
-					meta:        internalAccessList.maskItems(data)
+					action: "updated",
+					object_type: "access-list",
+					object_id: data.id,
+					meta: internalAccessList.maskItems(data),
 				});
 			})
 			.then(() => {
 				// re-fetch with expansions
-				return internalAccessList.get(access, {
-					id:     data.id,
-					expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
-				}, true /* <- skip masking */);
+				return internalAccessList.get(
+					access,
+					{
+						id: data.id,
+						expand: ["owner", "items", "clients", "proxy_hosts.[certificate,access_list.[clients,items]]"],
+					},
+					true /* <- skip masking */,
+				);
 			})
 			.then((row) => {
-				return internalAccessList.build(row)
+				return internalAccessList
+					.build(row)
 					.then(() => {
-						if (parseInt(row.proxy_host_count, 10)) {
-							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
+						if (Number.parseInt(row.proxy_host_count, 10)) {
+							return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
 						}
-					}).then(internalNginx.reload)
+					})
+					.then(internalNginx.reload)
 					.then(() => {
 						return internalAccessList.maskItems(row);
 					});
@@ -243,47 +246,50 @@ const internalAccessList = {
 	 * @return {Promise}
 	 */
 	get: (access, data, skip_masking) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('access_lists:get', data.id)
-			.then((access_data) => {
+		return access
+			.can("access_lists:get", thisData.id)
+			.then((accessData) => {
 				const query = accessListModel
 					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
+					.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+					.leftJoin("proxy_host", function () {
+						this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+							"proxy_host.is_deleted",
+							"=",
+							0,
+						);
 					})
-					.where('access_list.is_deleted', 0)
-					.andWhere('access_list.id', data.id)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
+					.where("access_list.is_deleted", 0)
+					.andWhere("access_list.id", thisData.id)
+					.groupBy("access_list.id")
+					.allowGraph("[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
+				if (accessData.permission_visibility !== "all") {
+					query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched(`[${data.expand.join(', ')}]`);
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
+				let thisRow = row;
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
-					row = internalAccessList.maskItems(row);
+				if (!skip_masking && typeof thisRow.items !== "undefined" && thisRow.items) {
+					thisRow = internalAccessList.maskItems(thisRow);
 				}
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof data.omit !== "undefined" && data.omit !== null) {
+					thisRow = _.omit(thisRow, data.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -295,13 +301,14 @@ const internalAccessList = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('access_lists:delete', data.id)
+		return access
+			.can("access_lists:delete", data.id)
 			.then(() => {
-				return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
+				return internalAccessList.get(access, { id: data.id, expand: ["proxy_hosts", "items", "clients"] });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				// 1. update row to be deleted
@@ -312,26 +319,27 @@ const internalAccessList = {
 				// 1. update row to be deleted
 				return accessListModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// 2. update any proxy hosts that were using it (ignoring permissions)
 						if (row.proxy_hosts) {
 							return proxyHostModel
 								.query()
-								.where('access_list_id', '=', row.id)
-								.patch({access_list_id: 0})
+								.where("access_list_id", "=", row.id)
+								.patch({ access_list_id: 0 })
 								.then(() => {
 									// 3. reconfigure those hosts, then reload nginx
 
 									// set the access_list_id to zero for these items
 									row.proxy_hosts.map((_val, idx) => {
 										row.proxy_hosts[idx].access_list_id = 0;
+										return true;
 									});
 
-									return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
+									return internalNginx.bulkGenerateConfigs("proxy_host", row.proxy_hosts);
 								})
 								.then(() => {
 									return internalNginx.reload();
@@ -351,10 +359,10 @@ const internalAccessList = {
 					.then(() => {
 						// 4. audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'access-list',
-							object_id:   row.id,
-							meta:        _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
+							action: "deleted",
+							object_type: "access-list",
+							object_id: row.id,
+							meta: _.omit(internalAccessList.maskItems(row), ["is_deleted", "proxy_hosts"]),
 						});
 					});
 			})
@@ -372,33 +380,37 @@ const internalAccessList = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('access_lists:list')
+		return access
+			.can("access_lists:list")
 			.then((access_data) => {
 				const query = accessListModel
 					.query()
-					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.leftJoin('proxy_host', function() {
-						this.on('proxy_host.access_list_id', '=', 'access_list.id')
-							.andOn('proxy_host.is_deleted', '=', 0);
+					.select("access_list.*", accessListModel.raw("COUNT(proxy_host.id) as proxy_host_count"))
+					.leftJoin("proxy_host", function () {
+						this.on("proxy_host.access_list_id", "=", "access_list.id").andOn(
+							"proxy_host.is_deleted",
+							"=",
+							0,
+						);
 					})
-					.where('access_list.is_deleted', 0)
-					.groupBy('access_list.id')
-					.allowGraph('[owner,items,clients]')
-					.orderBy('access_list.name', 'ASC');
+					.where("access_list.is_deleted", 0)
+					.groupBy("access_list.id")
+					.allowGraph("[owner,items,clients]")
+					.orderBy("access_list.name", "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("access_list.owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === "string") {
 					query.where(function () {
-						this.where('name', 'like', `%${search_query}%`);
+						this.where("name", "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched(`[${expand.join(', ')}]`);
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
@@ -406,9 +418,10 @@ const internalAccessList = {
 			.then((rows) => {
 				if (rows) {
 					rows.map((row, idx) => {
-						if (typeof row.items !== 'undefined' && row.items) {
+						if (typeof row.items !== "undefined" && row.items) {
 							rows[idx] = internalAccessList.maskItems(row);
 						}
+						return true;
 					});
 				}
 
@@ -424,19 +437,15 @@ const internalAccessList = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		const query = accessListModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = accessListModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
 	},
 
 	/**
@@ -444,18 +453,19 @@ const internalAccessList = {
 	 * @returns {Object}
 	 */
 	maskItems: (list) => {
-		if (list && typeof list.items !== 'undefined') {
+		if (list && typeof list.items !== "undefined") {
 			list.items.map((val, idx) => {
 				let repeat_for = 8;
-				let first_char = '*';
+				let first_char = "*";
 
-				if (typeof val.password !== 'undefined' && val.password) {
+				if (typeof val.password !== "undefined" && val.password) {
 					repeat_for = val.password.length - 1;
 					first_char = val.password.charAt(0);
 				}
 
-				list.items[idx].hint     = first_char + ('*').repeat(repeat_for);
-				list.items[idx].password = '';
+				list.items[idx].hint = first_char + "*".repeat(repeat_for);
+				list.items[idx].password = "";
+				return true;
 			});
 		}
 
@@ -493,48 +503,51 @@ const internalAccessList = {
 
 			// 2. create empty access file
 			try {
-				fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
+				fs.writeFileSync(htpasswd_file, "", { encoding: "utf8" });
 				resolve(htpasswd_file);
 			} catch (err) {
 				reject(err);
 			}
-		})
-			.then((htpasswd_file) => {
-				// 3. generate password for each user
-				if (list.items.length) {
-					return new Promise((resolve, reject) => {
-						batchflow(list.items).sequential()
-							.each((_i, item, next) => {
-								if (typeof item.password !== 'undefined' && item.password.length) {
-									logger.info(`Adding: ${item.username}`);
-
-									utils.execFile('openssl', ['passwd', '-apr1', item.password])
-										.then((res) => {
-											try {
-												fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {encoding: 'utf8'});
-											} catch (err) {
-												reject(err);
-											}
-											next();
-										})
-										.catch((err) => {
-											logger.error(err);
-											next(err);
-										});
-								}
-							})
-							.error((err) => {
-								logger.error(err);
-								reject(err);
-							})
-							.end((results) => {
-								logger.success(`Built Access file #${list.id} for: ${list.name}`);
-								resolve(results);
-							});
-					});
-				}
-			});
-	}
+		}).then((htpasswd_file) => {
+			// 3. generate password for each user
+			if (list.items.length) {
+				return new Promise((resolve, reject) => {
+					batchflow(list.items)
+						.sequential()
+						.each((_i, item, next) => {
+							if (typeof item.password !== "undefined" && item.password.length) {
+								logger.info(`Adding: ${item.username}`);
+
+								utils
+									.execFile("openssl", ["passwd", "-apr1", item.password])
+									.then((res) => {
+										try {
+											fs.appendFileSync(htpasswd_file, `${item.username}:${res}\n`, {
+												encoding: "utf8",
+											});
+										} catch (err) {
+											reject(err);
+										}
+										next();
+									})
+									.catch((err) => {
+										logger.error(err);
+										next(err);
+									});
+							}
+						})
+						.error((err) => {
+							logger.error(err);
+							reject(err);
+						})
+						.end((results) => {
+							logger.success(`Built Access file #${list.id} for: ${list.name}`);
+							resolve(results);
+						});
+				});
+			}
+		});
+	},
 };
 
-module.exports = internalAccessList;
+export default internalAccessList;

+ 35 - 36
backend/internal/audit-log.js

@@ -1,6 +1,6 @@
-const error            = require('../lib/error');
-const auditLogModel    = require('../models/audit-log');
-const {castJsonIfNeed} = require('../lib/helpers');
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import auditLogModel from "../models/audit-log.js";
 
 const internalAuditLog = {
 
@@ -13,28 +13,27 @@ const internalAuditLog = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('auditlog:list')
-			.then(() => {
-				let query = auditLogModel
-					.query()
-					.orderBy('created_on', 'DESC')
-					.orderBy('id', 'DESC')
-					.limit(100)
-					.allowGraph('[user]');
+		return access.can("auditlog:list").then(() => {
+			const query = auditLogModel
+				.query()
+				.orderBy("created_on", "DESC")
+				.orderBy("id", "DESC")
+				.limit(100)
+				.allowGraph("[user]");
 
-				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
-					query.where(function () {
-						this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
-					});
-				}
+			// Query is used for searching
+			if (typeof search_query === "string" && search_query.length > 0) {
+				query.where(function () {
+					this.where(castJsonIfNeed("meta"), "like", `%${search_query}%`);
+				});
+			}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
+			if (typeof expand !== "undefined" && expand !== null) {
+				query.withGraphFetched(`[${expand.join(", ")}]`);
+			}
 
-				return query;
-			});
+			return query;
+		});
 	},
 
 	/**
@@ -54,26 +53,26 @@ const internalAuditLog = {
 	add: (access, data) => {
 		return new Promise((resolve, reject) => {
 			// Default the user id
-			if (typeof data.user_id === 'undefined' || !data.user_id) {
+			if (typeof data.user_id === "undefined" || !data.user_id) {
 				data.user_id = access.token.getUserId(1);
 			}
 
-			if (typeof data.action === 'undefined' || !data.action) {
-				reject(new error.InternalValidationError('Audit log entry must contain an Action'));
+			if (typeof data.action === "undefined" || !data.action) {
+				reject(new errs.InternalValidationError("Audit log entry must contain an Action"));
 			} else {
 				// Make sure at least 1 of the IDs are set and action
-				resolve(auditLogModel
-					.query()
-					.insert({
-						user_id:     data.user_id,
-						action:      data.action,
-						object_type: data.object_type || '',
-						object_id:   data.object_id || 0,
-						meta:        data.meta || {}
-					}));
+				resolve(
+					auditLogModel.query().insert({
+						user_id: data.user_id,
+						action: data.action,
+						object_type: data.object_type || "",
+						object_id: data.object_id || 0,
+						meta: data.meta || {},
+					}),
+				);
 			}
 		});
-	}
+	},
 };
 
-module.exports = internalAuditLog;
+export default internalAuditLog;

File diff suppressed because it is too large
+ 336 - 322
backend/internal/certificate.js


+ 186 - 180
backend/internal/dead-host.js

@@ -1,106 +1,104 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const deadHostModel       = require('../models/dead_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import deadHostModel from "../models/dead_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalDeadHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		const createCertificate = data.certificate_id === "new";
 
-		if (create_certificate) {
+		if (createCertificate) {
 			delete data.certificate_id;
 		}
 
-		return access.can('dead_hosts:create', data)
+		return access
+			.can("dead_hosts:create", data)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				data.domain_names.map(function (domain_name) {
+				data.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
+				});
 			})
 			.then(() => {
 				// At this point the domains should have been checked
 				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				const thisData = internalHost.cleanSslHstsData(data);
 
 				// Fix for db field not having a default value
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof data.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 
-				return deadHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return deadHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, data)
 						.then((cert) => {
 							// update host with cert id
 							return internalDeadHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// re-fetch with cert
 				return internalDeadHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(deadHostModel, 'dead_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(deadHostModel, "dead_host", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
 				data.meta = _.assign({}, data.meta || {}, row.meta);
 
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'dead-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "dead-host",
+						object_id: row.id,
+						meta: data,
+					})
 					.then(() => {
 						return row;
 					});
@@ -114,95 +112,104 @@ const internalDeadHost = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('dead_hosts:update', data.id)
+		return access
+			.can("dead_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, "dead", data.id));
+						return true;
 					});
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
+					});
 				}
 			})
 			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
+				return internalDeadHost.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`404 Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					data,
+				);
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 				return deadHostModel
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "dead-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return _.omit(saved_row, omissions());
 							});
 					});
 			})
 			.then(() => {
-				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
+				return internalDeadHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate"],
+					})
 					.then((row) => {
 						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+						return internalNginx.configure(deadHostModel, "dead_host", row).then((new_meta) => {
+							row.meta = new_meta;
+							return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
+						});
 					});
 			});
 	},
@@ -216,36 +223,35 @@ const internalDeadHost = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('dead_hosts:get', data.id)
+		return access
+			.can("dead_hosts:get", thisData.id)
 			.then((access_data) => {
-				let query = deadHostModel
+				const query = deadHostModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${data.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(row, thisData.omit);
 				}
 				return row;
 			});
@@ -259,35 +265,35 @@ const internalDeadHost = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('dead_hosts:delete', data.id)
+		return access
+			.can("dead_hosts:delete", data.id)
 			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
+				return internalDeadHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return deadHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("dead_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "dead-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -304,39 +310,41 @@ const internalDeadHost = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
+		return access
+			.can("dead_hosts:update", data.id)
 			.then(() => {
 				return internalDeadHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return deadHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(deadHostModel, 'dead_host', row);
+						return internalNginx.configure(deadHostModel, "dead_host", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "dead-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -353,39 +361,40 @@ const internalDeadHost = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('dead_hosts:update', data.id)
+		return access
+			.can("dead_hosts:update", data.id)
 			.then(() => {
-				return internalDeadHost.get(access, {id: data.id});
+				return internalDeadHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return deadHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('dead_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("dead_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'dead-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "dead-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -403,34 +412,35 @@ const internalDeadHost = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('dead_hosts:list')
+		return access
+			.can("dead_hosts:list")
 			.then((access_data) => {
-				let query = deadHostModel
+				const query = deadHostModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -446,20 +456,16 @@ const internalDeadHost = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = deadHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = deadHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalDeadHost;
+export default internalDeadHost;
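
The pattern above repeats across every internal module in this commit: the object literal keeps its shape, but module.exports becomes export default and callers switch from require() to an import with an explicit .js extension (Node's ESM resolver does not infer it). A minimal consumer sketch, for illustration only; the relative path and the access object are assumptions, not part of this change:

// CommonJS (before):
// const internalDeadHost = require("./internal/dead-host");

// ESM (after): note the mandatory .js extension
import internalDeadHost from "./internal/dead-host.js";

// Usage is unchanged; "access" stands in for the auth context normally supplied by the API layer.
internalDeadHost
	.getAll(access, ["owner", "certificate"], null)
	.then((rows) => console.log(`${rows.length} dead hosts`));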

+ 130 - 115
backend/internal/host.js

@@ -1,11 +1,10 @@
-const _                    = require('lodash');
-const proxyHostModel       = require('../models/proxy_host');
-const redirectionHostModel = require('../models/redirection_host');
-const deadHostModel        = require('../models/dead_host');
-const {castJsonIfNeed}     = require('../lib/helpers');
+import _ from "lodash";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import deadHostModel from "../models/dead_host.js";
+import proxyHostModel from "../models/proxy_host.js";
+import redirectionHostModel from "../models/redirection_host.js";
 
 const internalHost = {
-
 	/**
 	 * Makes sure that the ssl_* and hsts_* fields play nicely together.
 	 * ie: if there is no cert, then force_ssl is off.
@@ -15,25 +14,23 @@ const internalHost = {
 	 * @param   {object} [existing_data]
 	 * @returns {object}
 	 */
-	cleanSslHstsData: function (data, existing_data) {
-		existing_data = existing_data === undefined ? {} : existing_data;
-
-		const combined_data = _.assign({}, existing_data, data);
+	cleanSslHstsData: (data, existingData) => {
+		const combinedData = _.assign({}, existingData || {}, data);
 
-		if (!combined_data.certificate_id) {
-			combined_data.ssl_forced    = false;
-			combined_data.http2_support = false;
+		if (!combinedData.certificate_id) {
+			combinedData.ssl_forced = false;
+			combinedData.http2_support = false;
 		}
 
-		if (!combined_data.ssl_forced) {
-			combined_data.hsts_enabled = false;
+		if (!combinedData.ssl_forced) {
+			combinedData.hsts_enabled = false;
 		}
 
-		if (!combined_data.hsts_enabled) {
-			combined_data.hsts_subdomains = false;
+		if (!combinedData.hsts_enabled) {
+			combinedData.hsts_subdomains = false;
 		}
 
-		return combined_data;
+		return combinedData;
 	},
 
 	/**
@@ -42,11 +39,12 @@ const internalHost = {
 	 * @param   {Array}  rows
 	 * @returns {Array}
 	 */
-	cleanAllRowsCertificateMeta: function (rows) {
-		rows.map(function (row, idx) {
-			if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
+	cleanAllRowsCertificateMeta: (rows) => {
+		rows.map((_, idx) => {
+			if (typeof rows[idx].certificate !== "undefined" && rows[idx].certificate) {
 				rows[idx].certificate.meta = {};
 			}
+			return true;
 		});
 
 		return rows;
@@ -58,8 +56,8 @@ const internalHost = {
 	 * @param   {Object}  row
 	 * @returns {Object}
 	 */
-	cleanRowCertificateMeta: function (row) {
-		if (typeof row.certificate !== 'undefined' && row.certificate) {
+	cleanRowCertificateMeta: (row) => {
+		if (typeof row.certificate !== "undefined" && row.certificate) {
 			row.certificate.meta = {};
 		}
 
@@ -73,48 +71,44 @@ const internalHost = {
 	 * @param   {Array}  domain_names
 	 * @returns {Promise}
 	 */
-	getHostsWithDomains: function (domain_names) {
+	getHostsWithDomains: (domain_names) => {
 		const promises = [
-			proxyHostModel
-				.query()
-				.where('is_deleted', 0),
-			redirectionHostModel
-				.query()
-				.where('is_deleted', 0),
-			deadHostModel
-				.query()
-				.where('is_deleted', 0)
+			proxyHostModel.query().where("is_deleted", 0),
+			redirectionHostModel.query().where("is_deleted", 0),
+			deadHostModel.query().where("is_deleted", 0),
 		];
 
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let response_object = {
-					total_count:       0,
-					dead_hosts:        [],
-					proxy_hosts:       [],
-					redirection_hosts: []
-				};
-
-				if (promises_results[0]) {
-					// Proxy Hosts
-					response_object.proxy_hosts  = internalHost._getHostsWithDomains(promises_results[0], domain_names);
-					response_object.total_count += response_object.proxy_hosts.length;
-				}
+		return Promise.all(promises).then((promises_results) => {
+			const response_object = {
+				total_count: 0,
+				dead_hosts: [],
+				proxy_hosts: [],
+				redirection_hosts: [],
+			};
+
+			if (promises_results[0]) {
+				// Proxy Hosts
+				response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
+				response_object.total_count += response_object.proxy_hosts.length;
+			}
 
-				if (promises_results[1]) {
-					// Redirection Hosts
-					response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
-					response_object.total_count      += response_object.redirection_hosts.length;
-				}
+			if (promises_results[1]) {
+				// Redirection Hosts
+				response_object.redirection_hosts = internalHost._getHostsWithDomains(
+					promises_results[1],
+					domain_names,
+				);
+				response_object.total_count += response_object.redirection_hosts.length;
+			}
 
-				if (promises_results[2]) {
-					// Dead Hosts
-					response_object.dead_hosts   = internalHost._getHostsWithDomains(promises_results[2], domain_names);
-					response_object.total_count += response_object.dead_hosts.length;
-				}
+			if (promises_results[2]) {
+				// Dead Hosts
+				response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
+				response_object.total_count += response_object.dead_hosts.length;
+			}
 
-				return response_object;
-			});
+			return response_object;
+		});
 	},
 
 	/**
@@ -125,112 +119,133 @@ const internalHost = {
 	 * @param   {Integer}  [ignore_id]     Must be supplied if type was also supplied
 	 * @returns {Promise}
 	 */
-	isHostnameTaken: function (hostname, ignore_type, ignore_id) {
+	isHostnameTaken: (hostname, ignore_type, ignore_id) => {
 		const promises = [
 			proxyHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			redirectionHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 			deadHostModel
 				.query()
-				.where('is_deleted', 0)
-				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
+				.where("is_deleted", 0)
+				.andWhere(castJsonIfNeed("domain_names"), "like", `%${hostname}%`),
 		];
 
-		return Promise.all(promises)
-			.then((promises_results) => {
-				let is_taken = false;
-
-				if (promises_results[0]) {
-					// Proxy Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+		return Promise.all(promises).then((promises_results) => {
+			let is_taken = false;
+
+			if (promises_results[0]) {
+				// Proxy Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[0],
+						ignore_type === "proxy" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				if (promises_results[1]) {
-					// Redirection Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[1]) {
+				// Redirection Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[1],
+						ignore_type === "redirection" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				if (promises_results[2]) {
-					// Dead Hosts
-					if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
-						is_taken = true;
-					}
+			if (promises_results[2]) {
+				// Dead Hosts
+				if (
+					internalHost._checkHostnameRecordsTaken(
+						hostname,
+						promises_results[2],
+						ignore_type === "dead" && ignore_id ? ignore_id : 0,
+					)
+				) {
+					is_taken = true;
 				}
+			}
 
-				return {
-					hostname: hostname,
-					is_taken: is_taken
-				};
-			});
+			return {
+				hostname: hostname,
+				is_taken: is_taken,
+			};
+		});
 	},
 
 	/**
 	 * Private call only
 	 *
 	 * @param   {String}  hostname
-	 * @param   {Array}   existing_rows
-	 * @param   {Integer} [ignore_id]
+	 * @param   {Array}   existingRows
+	 * @param   {Integer} [ignoreId]
 	 * @returns {Boolean}
 	 */
-	_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
-		let is_taken = false;
+	_checkHostnameRecordsTaken: (hostname, existingRows, ignoreId) => {
+		let isTaken = false;
 
-		if (existing_rows && existing_rows.length) {
-			existing_rows.map(function (existing_row) {
-				existing_row.domain_names.map(function (existing_hostname) {
+		if (existingRows?.length) {
+			existingRows.map((existingRow) => {
+				existingRow.domain_names.map((existingHostname) => {
 					// Does this domain match?
-					if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
-						if (!ignore_id || ignore_id !== existing_row.id) {
-							is_taken = true;
+					if (existingHostname.toLowerCase() === hostname.toLowerCase()) {
+						if (!ignoreId || ignoreId !== existingRow.id) {
+							isTaken = true;
 						}
 					}
+					return true;
 				});
+				return true;
 			});
 		}
 
-		return is_taken;
+		return isTaken;
 	},
 
 	/**
 	 * Private call only
 	 *
 	 * @param   {Array}   hosts
-	 * @param   {Array}   domain_names
+	 * @param   {Array}   domainNames
 	 * @returns {Array}
 	 */
-	_getHostsWithDomains: function (hosts, domain_names) {
-		let response = [];
+	_getHostsWithDomains: (hosts, domainNames) => {
+		const response = [];
 
-		if (hosts && hosts.length) {
-			hosts.map(function (host) {
-				let host_matches = false;
+		if (hosts?.length) {
+			hosts.map((host) => {
+				let hostMatches = false;
 
-				domain_names.map(function (domain_name) {
-					host.domain_names.map(function (host_domain_name) {
-						if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
-							host_matches = true;
+				domainNames.map((domainName) => {
+					host.domain_names.map((hostDomainName) => {
+						if (domainName.toLowerCase() === hostDomainName.toLowerCase()) {
+							hostMatches = true;
 						}
+						return true;
 					});
+					return true;
 				});
 
-				if (host_matches) {
+				if (hostMatches) {
 					response.push(host);
 				}
+				return true;
 			});
 		}
 
 		return response;
-	}
-
+	},
 };
 
-module.exports = internalHost;
+export default internalHost;

+ 65 - 56
backend/internal/ip_ranges.js

@@ -1,45 +1,51 @@
-const https         = require('https');
-const fs            = require('fs');
-const logger        = require('../logger').ip_ranges;
-const error         = require('../lib/error');
-const utils         = require('../lib/utils');
-const internalNginx = require('./nginx');
-
-const CLOUDFRONT_URL   = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
-const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
-const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
+import fs from "node:fs";
+import https from "node:https";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { ipRanges as logger } from "../logger.js";
+import internalNginx from "./nginx.js";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+const CLOUDFRONT_URL = "https://ip-ranges.amazonaws.com/ip-ranges.json";
+const CLOUDFARE_V4_URL = "https://www.cloudflare.com/ips-v4";
+const CLOUDFARE_V6_URL = "https://www.cloudflare.com/ips-v6";
 
 const regIpV4 = /^(\d+\.?){4}\/\d+/;
 const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
 
 const internalIpRanges = {
-
-	interval_timeout:    1000 * 60 * 60 * 6, // 6 hours
-	interval:            null,
+	interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
+	interval: null,
 	interval_processing: false,
-	iteration_count:     0,
+	iteration_count: 0,
 
 	initTimer: () => {
-		logger.info('IP Ranges Renewal Timer initialized');
+		logger.info("IP Ranges Renewal Timer initialized");
 		internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
 	},
 
 	fetchUrl: (url) => {
 		return new Promise((resolve, reject) => {
-			logger.info('Fetching ' + url);
-			return https.get(url, (res) => {
-				res.setEncoding('utf8');
-				let raw_data = '';
-				res.on('data', (chunk) => {
-					raw_data += chunk;
-				});
+			logger.info(`Fetching ${url}`);
+			return https
+				.get(url, (res) => {
+					res.setEncoding("utf8");
+					let raw_data = "";
+					res.on("data", (chunk) => {
+						raw_data += chunk;
+					});
 
-				res.on('end', () => {
-					resolve(raw_data);
+					res.on("end", () => {
+						resolve(raw_data);
+					});
+				})
+				.on("error", (err) => {
+					reject(err);
 				});
-			}).on('error', (err) => {
-				reject(err);
-			});
 		});
 	},
 
@@ -49,27 +55,30 @@ const internalIpRanges = {
 	fetch: () => {
 		if (!internalIpRanges.interval_processing) {
 			internalIpRanges.interval_processing = true;
-			logger.info('Fetching IP Ranges from online services...');
+			logger.info("Fetching IP Ranges from online services...");
 
 			let ip_ranges = [];
 
-			return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
+			return internalIpRanges
+				.fetchUrl(CLOUDFRONT_URL)
 				.then((cloudfront_data) => {
-					let data = JSON.parse(cloudfront_data);
+					const data = JSON.parse(cloudfront_data);
 
-					if (data && typeof data.prefixes !== 'undefined') {
+					if (data && typeof data.prefixes !== "undefined") {
 						data.prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ip_prefix);
 							}
+							return true;
 						});
 					}
 
-					if (data && typeof data.ipv6_prefixes !== 'undefined') {
+					if (data && typeof data.ipv6_prefixes !== "undefined") {
 						data.ipv6_prefixes.map((item) => {
-							if (item.service === 'CLOUDFRONT') {
+							if (item.service === "CLOUDFRONT") {
 								ip_ranges.push(item.ipv6_prefix);
 							}
+							return true;
 						});
 					}
 				})
@@ -77,38 +86,38 @@ const internalIpRanges = {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
 				})
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV4.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				.then(() => {
 					return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
 				})
 				.then((cloudfare_data) => {
-					let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
-					ip_ranges = [... ip_ranges, ... items];
+					const items = cloudfare_data.split("\n").filter((line) => regIpV6.test(line));
+					ip_ranges = [...ip_ranges, ...items];
 				})
 				.then(() => {
-					let clean_ip_ranges = [];
+					const clean_ip_ranges = [];
 					ip_ranges.map((range) => {
 						if (range) {
 							clean_ip_ranges.push(range);
 						}
+						return true;
 					});
 
-					return internalIpRanges.generateConfig(clean_ip_ranges)
-						.then(() => {
-							if (internalIpRanges.iteration_count) {
-								// Reload nginx
-								return internalNginx.reload();
-							}
-						});
+					return internalIpRanges.generateConfig(clean_ip_ranges).then(() => {
+						if (internalIpRanges.iteration_count) {
+							// Reload nginx
+							return internalNginx.reload();
+						}
+					});
 				})
 				.then(() => {
 					internalIpRanges.interval_processing = false;
 					internalIpRanges.iteration_count++;
 				})
 				.catch((err) => {
-					logger.error(err.message);
+					logger.fatal(err.message);
 					internalIpRanges.interval_processing = false;
 				});
 		}
@@ -122,26 +131,26 @@ const internalIpRanges = {
 		const renderEngine = utils.getRenderEngine();
 		return new Promise((resolve, reject) => {
 			let template = null;
-			let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
+			const filename = "/etc/nginx/conf.d/include/ip_ranges.conf";
 			try {
-				template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
 			renderEngine
-				.parseAndRender(template, {ip_ranges: ip_ranges})
+				.parseAndRender(template, { ip_ranges: ip_ranges })
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
 					resolve(true);
 				})
 				.catch((err) => {
-					logger.warn('Could not write ' + filename + ':', err.message);
-					reject(new error.ConfigurationError(err.message));
+					logger.warn(`Could not write ${filename}: ${err.message}`);
+					reject(new errs.ConfigurationError(err.message));
 				});
 		});
-	}
+	},
 };
 
-module.exports = internalIpRanges;
+export default internalIpRanges;
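
Because __dirname and __filename are not defined inside ES modules, this file (and nginx.js below) recreates them from import.meta.url before resolving template paths. The shim in isolation, as a reference sketch rather than additional code from the commit:

import { dirname } from "node:path";
import { fileURLToPath } from "node:url";

// Recreate the CommonJS globals that ESM does not provide.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Template paths can then stay relative to the module, e.g.:
// fs.readFileSync(`${__dirname}/../templates/ip_ranges.conf`, { encoding: "utf8" });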

+ 96 - 103
backend/internal/nginx.js

@@ -1,12 +1,15 @@
-const _      = require('lodash');
-const fs     = require('node:fs');
-const logger = require('../logger').nginx;
-const config = require('../lib/config');
-const utils  = require('../lib/utils');
-const error  = require('../lib/error');
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import { nginx as logger } from "../logger.js";
 
-const internalNginx = {
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
+const internalNginx = {
 	/**
 	 * This will:
 	 * - test the nginx config first to make sure it's OK
@@ -24,7 +27,8 @@ const internalNginx = {
 	configure: (model, host_type, host) => {
 		let combined_meta = {};
 
-		return internalNginx.test()
+		return internalNginx
+			.test()
 			.then(() => {
 				// Nginx is OK
 				// We're deleting this config regardless.
@@ -37,20 +41,18 @@ const internalNginx = {
 			})
 			.then(() => {
 				// Test nginx again and update meta with result
-				return internalNginx.test()
+				return internalNginx
+					.test()
 					.then(() => {
 						// nginx is ok
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: true,
-							nginx_err:    null
+							nginx_err: null,
 						});
 
-						return model
-							.query()
-							.where('id', host.id)
-							.patch({
-								meta: combined_meta
-							});
+						return model.query().where("id", host.id).patch({
+							meta: combined_meta,
+						});
 					})
 					.catch((err) => {
 						// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
@@ -58,28 +60,27 @@ const internalNginx = {
 						//   nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
 
 						const valid_lines = [];
-						const err_lines   = err.message.split('\n');
+						const err_lines = err.message.split("\n");
 						err_lines.map((line) => {
-							if (line.indexOf('/var/log/nginx/error.log') === -1) {
+							if (line.indexOf("/var/log/nginx/error.log") === -1) {
 								valid_lines.push(line);
 							}
+							return true;
 						});
 
-						if (config.debug()) {
-							logger.error('Nginx test failed:', valid_lines.join('\n'));
-						}
+						logger.debug("Nginx test failed:", valid_lines.join("\n"));
 
 						// config is bad, update meta and delete config
 						combined_meta = _.assign({}, host.meta, {
 							nginx_online: false,
-							nginx_err:    valid_lines.join('\n')
+							nginx_err: valid_lines.join("\n"),
 						});
 
 						return model
 							.query()
-							.where('id', host.id)
+							.where("id", host.id)
 							.patch({
-								meta: combined_meta
+								meta: combined_meta,
 							})
 							.then(() => {
 								internalNginx.renameConfigAsError(host_type, host);
@@ -101,22 +102,18 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	test: () => {
-		if (config.debug()) {
-			logger.info('Testing Nginx configuration');
-		}
-
-		return utils.execFile('/usr/sbin/nginx', ['-t', '-g', 'error_log off;']);
+		logger.debug("Testing Nginx configuration");
+		return utils.execFile("/usr/sbin/nginx", ["-t", "-g", "error_log off;"]);
 	},
 
 	/**
 	 * @returns {Promise}
 	 */
 	reload: () => {
-		return internalNginx.test()
-			.then(() => {
-				logger.info('Reloading Nginx');
-				return utils.execFile('/usr/sbin/nginx', ['-s', 'reload']);
-			});
+		return internalNginx.test().then(() => {
+			logger.info("Reloading Nginx");
+			return utils.execFile("/usr/sbin/nginx", ["-s", "reload"]);
+		});
 	},
 
 	/**
@@ -125,8 +122,8 @@ const internalNginx = {
 	 * @returns {String}
 	 */
 	getConfigName: (host_type, host_id) => {
-		if (host_type === 'default') {
-			return '/data/nginx/default_host/site.conf';
+		if (host_type === "default") {
+			return "/data/nginx/default_host/site.conf";
 		}
 		return `/data/nginx/${internalNginx.getFileFriendlyHostType(host_type)}/${host_id}.conf`;
 	},
@@ -141,38 +138,45 @@ const internalNginx = {
 			let template;
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/_location.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
-			const renderEngine    = utils.getRenderEngine();
-			let renderedLocations = '';
+			const renderEngine = utils.getRenderEngine();
+			let renderedLocations = "";
 
 			const locationRendering = async () => {
 				for (let i = 0; i < host.locations.length; i++) {
-					const locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
-						{ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
-						{allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
-						{hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
-						{certificate: host.certificate}, host.locations[i]);
-
-					if (locationCopy.forward_host.indexOf('/') > -1) {
-						const splitted = locationCopy.forward_host.split('/');
+					const locationCopy = Object.assign(
+						{},
+						{ access_list_id: host.access_list_id },
+						{ certificate_id: host.certificate_id },
+						{ ssl_forced: host.ssl_forced },
+						{ caching_enabled: host.caching_enabled },
+						{ block_exploits: host.block_exploits },
+						{ allow_websocket_upgrade: host.allow_websocket_upgrade },
+						{ http2_support: host.http2_support },
+						{ hsts_enabled: host.hsts_enabled },
+						{ hsts_subdomains: host.hsts_subdomains },
+						{ access_list: host.access_list },
+						{ certificate: host.certificate },
+						host.locations[i],
+					);
+
+					if (locationCopy.forward_host.indexOf("/") > -1) {
+						const splitted = locationCopy.forward_host.split("/");
 
 						locationCopy.forward_host = splitted.shift();
-						locationCopy.forward_path = `/${splitted.join('/')}`;
+						locationCopy.forward_path = `/${splitted.join("/")}`;
 					}
 
-					// eslint-disable-next-line
 					renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
 				}
-
 			};
 
 			locationRendering().then(() => resolve(renderedLocations));
-
 		});
 	},
 
@@ -183,23 +187,21 @@ const internalNginx = {
 	 */
 	generateConfig: (host_type, host_row) => {
 		// Prevent modifying the original object:
-		const host           = JSON.parse(JSON.stringify(host_row));
+		const host = JSON.parse(JSON.stringify(host_row));
 		const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
 
-		if (config.debug()) {
-			logger.info(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
-		}
+		logger.debug(`Generating ${nice_host_type} Config:`, JSON.stringify(host, null, 2));
 
 		const renderEngine = utils.getRenderEngine();
 
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = internalNginx.getConfigName(nice_host_type, host.id);
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/${nice_host_type}.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
@@ -207,27 +209,26 @@ const internalNginx = {
 			let origLocations;
 
 			// Manipulate the data a bit before sending it to the template
-			if (nice_host_type !== 'default') {
+			if (nice_host_type !== "default") {
 				host.use_default_location = true;
-				if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
+				if (typeof host.advanced_config !== "undefined" && host.advanced_config) {
 					host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
 				}
 			}
 
 			if (host.locations) {
 				//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
-				origLocations    = [].concat(host.locations);
+				origLocations = [].concat(host.locations);
 				locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
 					host.locations = renderedLocations;
 				});
 
 				// Allow someone who is using / custom location path to use it, and skip the default / location
 				_.map(host.locations, (location) => {
-					if (location.path === '/') {
+					if (location.path === "/") {
 						host.use_default_location = false;
 					}
 				});
-
 			} else {
 				locationsPromise = Promise.resolve();
 			}
@@ -239,11 +240,8 @@ const internalNginx = {
 				renderEngine
 					.parseAndRender(template, host)
 					.then((config_text) => {
-						fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-						if (config.debug()) {
-							logger.success('Wrote config:', filename, config_text);
-						}
+						fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+						logger.debug("Wrote config:", filename, config_text);
 
 						// Restore locations array
 						host.locations = origLocations;
@@ -251,11 +249,8 @@ const internalNginx = {
 						resolve(true);
 					})
 					.catch((err) => {
-						if (config.debug()) {
-							logger.warn(`Could not write ${filename}:`, err.message);
-						}
-
-						reject(new error.ConfigurationError(err.message));
+						logger.debug(`Could not write ${filename}:`, err.message);
+						reject(new errs.ConfigurationError(err.message));
 					});
 			});
 		});
@@ -270,20 +265,17 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	generateLetsEncryptRequestConfig: (certificate) => {
-		if (config.debug()) {
-			logger.info('Generating LetsEncrypt Request Config:', certificate);
-		}
-
+		logger.debug("Generating LetsEncrypt Request Config:", certificate);
 		const renderEngine = utils.getRenderEngine();
 
 		return new Promise((resolve, reject) => {
-			let template   = null;
+			let template = null;
 			const filename = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
 
 			try {
-				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, {encoding: 'utf8'});
+				template = fs.readFileSync(`${__dirname}/../templates/letsencrypt-request.conf`, { encoding: "utf8" });
 			} catch (err) {
-				reject(new error.ConfigurationError(err.message));
+				reject(new errs.ConfigurationError(err.message));
 				return;
 			}
 
@@ -292,20 +284,13 @@ const internalNginx = {
 			renderEngine
 				.parseAndRender(template, certificate)
 				.then((config_text) => {
-					fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
-					if (config.debug()) {
-						logger.success('Wrote config:', filename, config_text);
-					}
-
+					fs.writeFileSync(filename, config_text, { encoding: "utf8" });
+					logger.debug("Wrote config:", filename, config_text);
 					resolve(true);
 				})
 				.catch((err) => {
-					if (config.debug()) {
-						logger.warn(`Could not write ${filename}:`, err.message);
-					}
-
-					reject(new error.ConfigurationError(err.message));
+					logger.debug(`Could not write ${filename}:`, err.message);
+					reject(new errs.ConfigurationError(err.message));
 				});
 		});
 	},
@@ -320,7 +305,7 @@ const internalNginx = {
 		try {
 			fs.unlinkSync(filename);
 		} catch (err) {
-			logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
+			logger.debug("Could not delete file:", JSON.stringify(err, null, 2));
 		}
 	},
 
@@ -330,7 +315,7 @@ const internalNginx = {
 	 * @returns String
 	 */
 	getFileFriendlyHostType: (host_type) => {
-		return host_type.replace(/-/g, '_');
+		return host_type.replace(/-/g, "_");
 	},
 
 	/**
@@ -341,7 +326,7 @@ const internalNginx = {
 	 */
 	deleteLetsEncryptRequestConfig: (certificate) => {
 		const config_file = `/data/nginx/temp/letsencrypt_${certificate.id}.conf`;
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			resolve();
 		});
@@ -354,10 +339,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	deleteConfig: (host_type, host, delete_err_file) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			internalNginx.deleteFile(config_file);
 			if (delete_err_file) {
 				internalNginx.deleteFile(config_file_err);
@@ -372,10 +360,13 @@ const internalNginx = {
 	 * @returns {Promise}
 	 */
 	renameConfigAsError: (host_type, host) => {
-		const config_file     = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
+		const config_file = internalNginx.getConfigName(
+			internalNginx.getFileFriendlyHostType(host_type),
+			typeof host === "undefined" ? 0 : host.id,
+		);
 		const config_file_err = `${config_file}.err`;
 
-		return new Promise((resolve/*, reject*/) => {
+		return new Promise((resolve /*, reject*/) => {
 			fs.unlink(config_file, () => {
 				// ignore result, continue
 				fs.rename(config_file, config_file_err, () => {
@@ -395,6 +386,7 @@ const internalNginx = {
 		const promises = [];
 		hosts.map((host) => {
 			promises.push(internalNginx.generateConfig(host_type, host));
+			return true;
 		});
 
 		return Promise.all(promises);
@@ -409,6 +401,7 @@ const internalNginx = {
 		const promises = [];
 		hosts.map((host) => {
 			promises.push(internalNginx.deleteConfig(host_type, host, true));
+			return true;
 		});
 
 		return Promise.all(promises);
@@ -424,13 +417,13 @@ const internalNginx = {
 	 * @returns {boolean}
 	 */
 	ipv6Enabled: () => {
-		if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
+		if (typeof process.env.DISABLE_IPV6 !== "undefined") {
 			const disabled = process.env.DISABLE_IPV6.toLowerCase();
-			return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
+			return !(disabled === "on" || disabled === "true" || disabled === "1" || disabled === "yes");
 		}
 
 		return true;
-	}
+	},
 };
 
-module.exports = internalNginx;
+export default internalNginx;
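
Note that the repeated if (config.debug()) { logger.info(...) } guards are collapsed into single logger.debug(...) calls throughout this file, so the config import is dropped; the verbosity decision moves into the logger, assuming it filters by level. Equivalent behaviour in sketch form:

// Before: each call site checked the debug flag itself.
// if (config.debug()) {
// 	logger.info("Testing Nginx configuration");
// }

// After: the logger decides, based on its configured level.
logger.debug("Testing Nginx configuration");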

+ 191 - 183
backend/internal/proxy-host.js

@@ -1,107 +1,106 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const proxyHostModel      = require('../models/proxy_host');
-const internalHost        = require('./host');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import proxyHostModel from "../models/proxy_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted"];
+};
 
 const internalProxyHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('proxy_hosts:create', data)
+		return access
+			.can("proxy_hosts:create", thisData)
 			.then(() => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
+				});
 			})
 			.then(() => {
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 				// Fix for db field not having a default value
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof thisData.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 
-				return proxyHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return proxyHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 							// update host with cert id
 							return internalProxyHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// re-fetch with cert
 				return internalProxyHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner', 'access_list.[clients,items]']
+					id: row.id,
+					expand: ["certificate", "owner", "access_list.[clients,items]"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(proxyHostModel, "proxy_host", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
 				// Audit log
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'proxy-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "proxy-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 						return row;
 					});
@@ -115,100 +114,110 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						return domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "proxy", thisData.id),
+						);
 					});
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
+					});
 				}
 			})
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Proxy Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					data,
+				);
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 				return proxyHostModel
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "proxy-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return saved_row;
 							});
 					});
 			})
 			.then(() => {
-				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate', 'access_list.[clients,items]']
-				})
+				return internalProxyHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate", "access_list.[clients,items]"],
+					})
 					.then((row) => {
 						if (!row.enabled) {
 							// No need to add nginx config if host is disabled
 							return row;
 						}
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+						return internalNginx.configure(proxyHostModel, "proxy_host", row).then((new_meta) => {
+							row.meta = new_meta;
+							return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
+						});
 					});
 			});
 	},
@@ -222,39 +231,38 @@ const internalProxyHost = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('proxy_hosts:get', data.id)
+		return access
+			.can("proxy_hosts:get", thisData.id)
 			.then((access_data) => {
-				let query = proxyHostModel
+				const query = proxyHostModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,access_list.[clients,items],certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,access_list.[clients,items],certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				const thisRow = internalHost.cleanRowCertificateMeta(row);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -266,35 +274,35 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('proxy_hosts:delete', data.id)
+		return access
+			.can("proxy_hosts:delete", data.id)
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -311,39 +319,41 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
 				return internalProxyHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner', 'access_list']
+					id: data.id,
+					expand: ["certificate", "owner", "access_list"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(proxyHostModel, 'proxy_host', row);
+						return internalNginx.configure(proxyHostModel, "proxy_host", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -360,39 +370,40 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('proxy_hosts:update', data.id)
+		return access
+			.can("proxy_hosts:update", data.id)
 			.then(() => {
-				return internalProxyHost.get(access, {id: data.id});
+				return internalProxyHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return proxyHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('proxy_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("proxy_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'proxy-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "proxy-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -410,34 +421,35 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('proxy_hosts:list')
+		return access
+			.can("proxy_hosts:list")
 			.then((access_data) => {
-				let query = proxyHostModel
+				const query = proxyHostModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,access_list,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,access_list,certificate]")
+					.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -453,20 +465,16 @@ const internalProxyHost = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = proxyHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = proxyHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalProxyHost;
+export default internalProxyHost;

+ 195 - 182
backend/internal/redirection-host.js

@@ -1,73 +1,73 @@
-const _                    = require('lodash');
-const error                = require('../lib/error');
-const utils                = require('../lib/utils');
-const redirectionHostModel = require('../models/redirection_host');
-const internalHost         = require('./host');
-const internalNginx        = require('./nginx');
-const internalAuditLog     = require('./audit-log');
-const internalCertificate  = require('./certificate');
-const {castJsonIfNeed}     = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import redirectionHostModel from "../models/redirection_host.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted"];
+};
 
 const internalRedirectionHost = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('redirection_hosts:create', data)
+		return access
+			.can("redirection_hosts:create", thisData)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
+				const domain_name_check_promises = [];
 
-				data.domain_names.map(function (domain_name) {
+				thisData.domain_names.map((domain_name) => {
 					domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
+					return true;
 				});
 
-				return Promise.all(domain_name_check_promises)
-					.then((check_results) => {
-						check_results.map(function (result) {
-							if (result.is_taken) {
-								throw new error.ValidationError(result.hostname + ' is already in use');
-							}
-						});
+				return Promise.all(domain_name_check_promises).then((check_results) => {
+					check_results.map((result) => {
+						if (result.is_taken) {
+							throw new errs.ValidationError(`${result.hostname} is already in use`);
+						}
+						return true;
 					});
+				});
 			})
 			.then(() => {
 				// At this point the domains should have been checked
-				data.owner_user_id = access.token.getUserId(1);
-				data               = internalHost.cleanSslHstsData(data);
+				thisData.owner_user_id = access.token.getUserId(1);
+				thisData = internalHost.cleanSslHstsData(thisData);
 
 				// Fix for db field not having a default value
 				// for this optional field.
-				if (typeof data.advanced_config === 'undefined') {
-					data.advanced_config = '';
+				if (typeof thisData.advanced_config === "undefined") {
+					thisData.advanced_config = "";
 				}
 
-				return redirectionHostModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return redirectionHostModel.query().insertAndFetch(thisData).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, thisData)
 						.then((cert) => {
 							// update host with cert id
 							return internalRedirectionHost.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
@@ -79,27 +79,27 @@ const internalRedirectionHost = {
 			.then((row) => {
 				// re-fetch with cert
 				return internalRedirectionHost.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(redirectionHostModel, "redirection_host", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
-				data.meta = _.assign({}, data.meta || {}, row.meta);
+				thisData.meta = _.assign({}, thisData.meta || {}, row.meta);
 
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'redirection-host',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "redirection-host",
+						object_id: row.id,
+						meta: thisData,
+					})
 					.then(() => {
 						return row;
 					});
@@ -113,94 +113,107 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		let create_certificate = data.certificate_id === 'new';
+		let thisData = data || {};
+		const createCertificate = thisData.certificate_id === "new";
 
-		if (create_certificate) {
-			delete data.certificate_id;
+		if (createCertificate) {
+			delete thisData.certificate_id;
 		}
 
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", thisData.id)
 			.then((/*access_data*/) => {
 				// Get a list of the domain names and check each of them against existing records
-				let domain_name_check_promises = [];
-
-				if (typeof data.domain_names !== 'undefined') {
-					data.domain_names.map(function (domain_name) {
-						domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
+				const domain_name_check_promises = [];
+
+				if (typeof thisData.domain_names !== "undefined") {
+					thisData.domain_names.map((domain_name) => {
+						domain_name_check_promises.push(
+							internalHost.isHostnameTaken(domain_name, "redirection", thisData.id),
+						);
+						return true;
 					});
 
-					return Promise.all(domain_name_check_promises)
-						.then((check_results) => {
-							check_results.map(function (result) {
-								if (result.is_taken) {
-									throw new error.ValidationError(result.hostname + ' is already in use');
-								}
-							});
+					return Promise.all(domain_name_check_promises).then((check_results) => {
+						check_results.map((result) => {
+							if (result.is_taken) {
+								throw new errs.ValidationError(`${result.hostname} is already in use`);
+							}
+							return true;
 						});
+					});
 				}
 			})
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Redirection Host could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
-				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+				if (createCertificate) {
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
-				data = internalHost.cleanSslHstsData(data, row);
+				thisData = internalHost.cleanSslHstsData(thisData, row);
 
 				return redirectionHostModel
 					.query()
-					.where({id: data.id})
-					.patch(data)
+					.where({ id: thisData.id })
+					.patch(thisData)
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "redirection-host",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return _.omit(saved_row, omissions());
 							});
 					});
 			})
 			.then(() => {
-				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['owner', 'certificate']
-				})
+				return internalRedirectionHost
+					.get(access, {
+						id: thisData.id,
+						expand: ["owner", "certificate"],
+					})
 					.then((row) => {
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
+						return internalNginx
+							.configure(redirectionHostModel, "redirection_host", row)
 							.then((new_meta) => {
 								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
+								return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 							});
 					});
 			});
@@ -215,39 +228,39 @@ const internalRedirectionHost = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('redirection_hosts:get', data.id)
+		return access
+			.can("redirection_hosts:get", thisData.id)
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -259,35 +272,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('redirection_hosts:delete', data.id)
+		return access
+			.can("redirection_hosts:delete", data.id)
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -304,39 +317,41 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
 				return internalRedirectionHost.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Host is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Host is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
+						return internalNginx.configure(redirectionHostModel, "redirection_host", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -353,39 +368,40 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('redirection_hosts:update', data.id)
+		return access
+			.can("redirection_hosts:update", data.id)
 			.then(() => {
-				return internalRedirectionHost.get(access, {id: data.id});
+				return internalRedirectionHost.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Host is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Host is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return redirectionHostModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('redirection_host', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("redirection_host", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'redirection-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "redirection-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -403,34 +419,35 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('redirection_hosts:list')
+		return access
+			.can("redirection_hosts:list")
 			.then((access_data) => {
-				let query = redirectionHostModel
+				const query = redirectionHostModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy(castJsonIfNeed("domain_names"), "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("domain_names"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -446,20 +463,16 @@ const internalRedirectionHost = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = redirectionHostModel
-			.query()
-			.count('id as count')
-			.where('is_deleted', 0);
+		const query = redirectionHostModel.query().count("id as count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalRedirectionHost;
+export default internalRedirectionHost;

+ 17 - 18
backend/internal/report.js

@@ -1,38 +1,37 @@
-const internalProxyHost       = require('./proxy-host');
-const internalRedirectionHost = require('./redirection-host');
-const internalDeadHost        = require('./dead-host');
-const internalStream          = require('./stream');
+import internalDeadHost from "./dead-host.js";
+import internalProxyHost from "./proxy-host.js";
+import internalRedirectionHost from "./redirection-host.js";
+import internalStream from "./stream.js";
 
 const internalReport = {
-
 	/**
 	 * @param  {Access}   access
 	 * @return {Promise}
 	 */
 	getHostsReport: (access) => {
-		return access.can('reports:hosts', 1)
+		return access
+			.can("reports:hosts", 1)
 			.then((access_data) => {
-				let user_id = access.token.getUserId(1);
+				const userId = access.token.getUserId(1);
 
-				let promises = [
-					internalProxyHost.getCount(user_id, access_data.visibility),
-					internalRedirectionHost.getCount(user_id, access_data.visibility),
-					internalStream.getCount(user_id, access_data.visibility),
-					internalDeadHost.getCount(user_id, access_data.visibility)
+				const promises = [
+					internalProxyHost.getCount(userId, access_data.visibility),
+					internalRedirectionHost.getCount(userId, access_data.visibility),
+					internalStream.getCount(userId, access_data.visibility),
+					internalDeadHost.getCount(userId, access_data.visibility),
 				];
 
 				return Promise.all(promises);
 			})
 			.then((counts) => {
 				return {
-					proxy:       counts.shift(),
+					proxy: counts.shift(),
 					redirection: counts.shift(),
-					stream:      counts.shift(),
-					dead:        counts.shift()
+					stream: counts.shift(),
+					dead: counts.shift(),
 				};
 			});
-
-	}
+	},
 };
 
-module.exports = internalReport;
+export default internalReport;

+ 35 - 43
backend/internal/setting.js

@@ -1,10 +1,9 @@
-const fs            = require('fs');
-const error         = require('../lib/error');
-const settingModel  = require('../models/setting');
-const internalNginx = require('./nginx');
+import fs from "node:fs";
+import errs from "../lib/error.js";
+import settingModel from "../models/setting.js";
+import internalNginx from "./nginx.js";
 
 const internalSetting = {
-
 	/**
 	 * @param  {Access}  access
 	 * @param  {Object}  data
@@ -12,37 +11,38 @@ const internalSetting = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		return access.can('settings:update', data.id)
+		return access
+			.can("settings:update", data.id)
 			.then((/*access_data*/) => {
-				return internalSetting.get(access, {id: data.id});
+				return internalSetting.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (row.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Setting could not be updated, IDs do not match: ${row.id} !== ${data.id}`,
+					);
 				}
 
-				return settingModel
-					.query()
-					.where({id: data.id})
-					.patch(data);
+				return settingModel.query().where({ id: data.id }).patch(data);
 			})
 			.then(() => {
 				return internalSetting.get(access, {
-					id: data.id
+					id: data.id,
 				});
 			})
 			.then((row) => {
-				if (row.id === 'default-site') {
+				if (row.id === "default-site") {
 					// write the html if we need to
-					if (row.value === 'html') {
-						fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
+					if (row.value === "html") {
+						fs.writeFileSync("/data/nginx/default_www/index.html", row.meta.html, { encoding: "utf8" });
 					}
 
 					// Configure nginx
-					return internalNginx.deleteConfig('default')
+					return internalNginx
+						.deleteConfig("default")
 						.then(() => {
-							return internalNginx.generateConfig('default', row);
+							return internalNginx.generateConfig("default", row);
 						})
 						.then(() => {
 							return internalNginx.test();
@@ -54,7 +54,8 @@ const internalSetting = {
 							return row;
 						})
 						.catch((/*err*/) => {
-							internalNginx.deleteConfig('default')
+							internalNginx
+								.deleteConfig("default")
 								.then(() => {
 									return internalNginx.test();
 								})
@@ -63,12 +64,11 @@ const internalSetting = {
 								})
 								.then(() => {
 									// I'm being slack here I know..
-									throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
+									throw new errs.ValidationError("Could not reconfigure Nginx. Please check logs.");
 								});
 						});
-				} else {
-					return row;
 				}
+				return row;
 			});
 	},
 
@@ -79,19 +79,16 @@ const internalSetting = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		return access.can('settings:get', data.id)
+		return access
+			.can("settings:get", data.id)
 			.then(() => {
-				return settingModel
-					.query()
-					.where('id', data.id)
-					.first();
+				return settingModel.query().where("id", data.id).first();
 			})
 			.then((row) => {
 				if (row) {
 					return row;
-				} else {
-					throw new error.ItemNotFoundError(data.id);
 				}
+				throw new errs.ItemNotFoundError(data.id);
 			});
 	},
 
@@ -102,15 +99,13 @@ const internalSetting = {
 	 * @returns {*}
 	 */
 	getCount: (access) => {
-		return access.can('settings:list')
+		return access
+			.can("settings:list")
 			.then(() => {
-				return settingModel
-					.query()
-					.count('id as count')
-					.first();
+				return settingModel.query().count("id as count").first();
 			})
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 	},
 
@@ -121,13 +116,10 @@ const internalSetting = {
 	 * @returns {Promise}
 	 */
 	getAll: (access) => {
-		return access.can('settings:list')
-			.then(() => {
-				return settingModel
-					.query()
-					.orderBy('description', 'ASC');
-			});
-	}
+		return access.can("settings:list").then(() => {
+			return settingModel.query().orderBy("description", "ASC");
+		});
+	},
 };
 
-module.exports = internalSetting;
+export default internalSetting;

+ 156 - 153
backend/internal/stream.js

@@ -1,88 +1,85 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const streamModel         = require('../models/stream');
-const internalNginx       = require('./nginx');
-const internalAuditLog    = require('./audit-log');
-const internalCertificate = require('./certificate');
-const internalHost        = require('./host');
-const {castJsonIfNeed}    = require('../lib/helpers');
-
-function omissions () {
-	return ['is_deleted', 'owner.is_deleted', 'certificate.is_deleted'];
-}
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { castJsonIfNeed } from "../lib/helpers.js";
+import utils from "../lib/utils.js";
+import streamModel from "../models/stream.js";
+import internalAuditLog from "./audit-log.js";
+import internalCertificate from "./certificate.js";
+import internalHost from "./host.js";
+import internalNginx from "./nginx.js";
+
+const omissions = () => {
+	return ["is_deleted", "owner.is_deleted", "certificate.is_deleted"];
+};
 
 const internalStream = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		const create_certificate = data.certificate_id === "new";
 
 		if (create_certificate) {
 			delete data.certificate_id;
 		}
 
-		return access.can('streams:create', data)
+		return access
+			.can("streams:create", data)
 			.then((/*access_data*/) => {
 				// TODO: At this point the existing ports should have been checked
 				data.owner_user_id = access.token.getUserId(1);
 
-				if (typeof data.meta === 'undefined') {
+				if (typeof data.meta === "undefined") {
 					data.meta = {};
 				}
 
 				// streams aren't routed by domain name so don't store domain names in the DB
-				let data_no_domains = structuredClone(data);
+				const data_no_domains = structuredClone(data);
 				delete data_no_domains.domain_names;
 
-				return streamModel
-					.query()
-					.insertAndFetch(data_no_domains)
-					.then(utils.omitRow(omissions()));
+				return streamModel.query().insertAndFetch(data_no_domains).then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, data)
+					return internalCertificate
+						.createQuickCertificate(access, data)
 						.then((cert) => {
 							// update host with cert id
 							return internalStream.update(access, {
-								id:             row.id,
-								certificate_id: cert.id
+								id: row.id,
+								certificate_id: cert.id,
 							});
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// re-fetch with cert
 				return internalStream.get(access, {
-					id:     row.id,
-					expand: ['certificate', 'owner']
+					id: row.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				// Configure nginx
-				return internalNginx.configure(streamModel, 'stream', row)
-					.then(() => {
-						return row;
-					});
+				return internalNginx.configure(streamModel, "stream", row).then(() => {
+					return row;
+				});
 			})
 			.then((row) => {
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'stream',
-					object_id:   row.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "stream",
+						object_id: row.id,
+						meta: data,
+					})
 					.then(() => {
 						return row;
 					});
@@ -96,72 +93,78 @@ const internalStream = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		const create_certificate = data.certificate_id === 'new';
+		let thisData = data;
+		const create_certificate = thisData.certificate_id === "new";
 
 		if (create_certificate) {
-			delete data.certificate_id;
+			delete thisData.certificate_id;
 		}
 
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", thisData.id)
 			.then((/*access_data*/) => {
 				// TODO: at this point the existing streams should have been checked
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: thisData.id });
 			})
 			.then((row) => {
-				if (row.id !== data.id) {
+				if (row.id !== thisData.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`Stream could not be updated, IDs do not match: ${row.id} !== ${thisData.id}`,
+					);
 				}
 
 				if (create_certificate) {
-					return internalCertificate.createQuickCertificate(access, {
-						domain_names: data.domain_names || row.domain_names,
-						meta:         _.assign({}, row.meta, data.meta)
-					})
+					return internalCertificate
+						.createQuickCertificate(access, {
+							domain_names: thisData.domain_names || row.domain_names,
+							meta: _.assign({}, row.meta, thisData.meta),
+						})
 						.then((cert) => {
 							// update host with cert id
-							data.certificate_id = cert.id;
+							thisData.certificate_id = cert.id;
 						})
 						.then(() => {
 							return row;
 						});
-				} else {
-					return row;
 				}
+				return row;
 			})
 			.then((row) => {
 				// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
-				data = _.assign({}, {
-					domain_names: row.domain_names
-				}, data);
+				thisData = _.assign(
+					{},
+					{
+						domain_names: row.domain_names,
+					},
+					thisData,
+				);
 
 				return streamModel
 					.query()
-					.patchAndFetchById(row.id, data)
+					.patchAndFetchById(row.id, thisData)
 					.then(utils.omitRow(omissions()))
 					.then((saved_row) => {
 						// Add to audit log
-						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        data
-						})
+						return internalAuditLog
+							.add(access, {
+								action: "updated",
+								object_type: "stream",
+								object_id: row.id,
+								meta: thisData,
+							})
 							.then(() => {
 								return saved_row;
 							});
 					});
 			})
 			.then(() => {
-				return internalStream.get(access, {id: data.id, expand: ['owner', 'certificate']})
-					.then((row) => {
-						return internalNginx.configure(streamModel, 'stream', row)
-							.then((new_meta) => {
-								row.meta = new_meta;
-								row      = internalHost.cleanRowCertificateMeta(row);
-								return _.omit(row, omissions());
-							});
+				return internalStream.get(access, { id: thisData.id, expand: ["owner", "certificate"] }).then((row) => {
+					return internalNginx.configure(streamModel, "stream", row).then((new_meta) => {
+						row.meta = new_meta;
+						return _.omit(internalHost.cleanRowCertificateMeta(row), omissions());
 					});
+				});
 			});
 	},
 
@@ -174,39 +177,39 @@ const internalStream = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		return access.can('streams:get', data.id)
+		return access
+			.can("streams:get", thisData.id)
 			.then((access_data) => {
-				let query = streamModel
+				const query = streamModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[owner,certificate]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[owner,certificate]")
 					.first();
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
-				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+				let thisRow = row;
+				if (!thisRow || !thisRow.id) {
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
-				row = internalHost.cleanRowCertificateMeta(row);
+				thisRow = internalHost.cleanRowCertificateMeta(thisRow);
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(thisRow, thisData.omit);
 				}
-				return row;
+				return thisRow;
 			});
 	},
 
@@ -218,35 +221,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('streams:delete', data.id)
+		return access
+			.can("streams:delete", data.id)
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "deleted",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -263,39 +266,41 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	enable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
 				return internalStream.get(access, {
-					id:     data.id,
-					expand: ['certificate', 'owner']
+					id: data.id,
+					expand: ["certificate", "owner"],
 				});
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (row.enabled) {
-					throw new error.ValidationError('Stream is already enabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (row.enabled) {
+					throw new errs.ValidationError("Stream is already enabled");
 				}
 
 				row.enabled = 1;
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 1
+						enabled: 1,
 					})
 					.then(() => {
 						// Configure nginx
-						return internalNginx.configure(streamModel, 'stream', row);
+						return internalNginx.configure(streamModel, "stream", row);
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'enabled',
-							object_type: 'stream',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "enabled",
+							object_type: "stream",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -312,39 +317,40 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	disable: (access, data) => {
-		return access.can('streams:update', data.id)
+		return access
+			.can("streams:update", data.id)
 			.then(() => {
-				return internalStream.get(access, {id: data.id});
+				return internalStream.get(access, { id: data.id });
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
-				} else if (!row.enabled) {
-					throw new error.ValidationError('Stream is already disabled');
+					throw new errs.ItemNotFoundError(data.id);
+				}
+				if (!row.enabled) {
+					throw new errs.ValidationError("Stream is already disabled");
 				}
 
 				row.enabled = 0;
 
 				return streamModel
 					.query()
-					.where('id', row.id)
+					.where("id", row.id)
 					.patch({
-						enabled: 0
+						enabled: 0,
 					})
 					.then(() => {
 						// Delete Nginx Config
-						return internalNginx.deleteConfig('stream', row)
-							.then(() => {
-								return internalNginx.reload();
-							});
+						return internalNginx.deleteConfig("stream", row).then(() => {
+							return internalNginx.reload();
+						});
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'disabled',
-							object_type: 'stream-host',
-							object_id:   row.id,
-							meta:        _.omit(row, omissions())
+							action: "disabled",
+							object_type: "stream-host",
+							object_id: row.id,
+							meta: _.omit(row, omissions()),
 						});
 					});
 			})
@@ -362,34 +368,35 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('streams:list')
+		return access
+			.can("streams:list")
 			.then((access_data) => {
 				const query = streamModel
 					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[owner,certificate]')
-					.orderBy('incoming_port', 'ASC');
+					.where("is_deleted", 0)
+					.groupBy("id")
+					.allowGraph("[owner,certificate]")
+					.orderBy("incoming_port", "ASC");
 
-				if (access_data.permission_visibility !== 'all') {
-					query.andWhere('owner_user_id', access.token.getUserId(1));
+				if (access_data.permission_visibility !== "all") {
+					query.andWhere("owner_user_id", access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string' && search_query.length > 0) {
+				if (typeof search_query === "string" && search_query.length > 0) {
 					query.where(function () {
-						this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
+						this.where(castJsonIfNeed("incoming_port"), "like", `%${search_query}%`);
 					});
 				}
 
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
+				if (typeof expand !== "undefined" && expand !== null) {
+					query.withGraphFetched(`[${expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRows(omissions()));
 			})
 			.then((rows) => {
-				if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
+				if (typeof expand !== "undefined" && expand !== null && expand.indexOf("certificate") !== -1) {
 					return internalHost.cleanAllRowsCertificateMeta(rows);
 				}
 
@@ -405,20 +412,16 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		const query = streamModel
-			.query()
-			.count('id AS count')
-			.where('is_deleted', 0);
+		const query = streamModel.query().count("id AS count").where("is_deleted", 0);
 
-		if (visibility !== 'all') {
-			query.andWhere('owner_user_id', user_id);
+		if (visibility !== "all") {
+			query.andWhere("owner_user_id", user_id);
 		}
 
-		return query.first()
-			.then((row) => {
-				return parseInt(row.count, 10);
-			});
-	}
+		return query.first().then((row) => {
+			return Number.parseInt(row.count, 10);
+		});
+	},
 };
 
-module.exports = internalStream;
+export default internalStream;

+ 88 - 95
backend/internal/token.js

@@ -1,14 +1,14 @@
-const _          = require('lodash');
-const error      = require('../lib/error');
-const userModel  = require('../models/user');
-const authModel  = require('../models/auth');
-const helpers    = require('../lib/helpers');
-const TokenModel = require('../models/token');
+import _ from "lodash";
+import errs from "../lib/error.js";
+import { parseDatePeriod } from "../lib/helpers.js";
+import authModel from "../models/auth.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
 
-const ERROR_MESSAGE_INVALID_AUTH = 'Invalid email or password';
-
-module.exports = {
+const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
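+// i18n key variant of the message above; passed as the second argument to AuthError below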
+const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
 
+export default {
 	/**
 	 * @param   {Object} data
 	 * @param   {String} data.identity
@@ -19,68 +19,65 @@ module.exports = {
 	 * @returns {Promise}
 	 */
 	getTokenFromEmail: (data, issuer) => {
-		let Token = new TokenModel();
+		const Token = TokenModel();
 
-		data.scope  = data.scope || 'user';
-		data.expiry = data.expiry || '1d';
+		data.scope = data.scope || "user";
+		data.expiry = data.expiry || "1d";
 
 		return userModel
 			.query()
-			.where('email', data.identity.toLowerCase().trim())
-			.andWhere('is_deleted', 0)
-			.andWhere('is_disabled', 0)
+			.where("email", data.identity.toLowerCase().trim())
+			.andWhere("is_deleted", 0)
+			.andWhere("is_disabled", 0)
 			.first()
 			.then((user) => {
 				if (user) {
 					// Get auth
 					return authModel
 						.query()
-						.where('user_id', '=', user.id)
-						.where('type', '=', 'password')
+						.where("user_id", "=", user.id)
+						.where("type", "=", "password")
 						.first()
 						.then((auth) => {
 							if (auth) {
-								return auth.verifyPassword(data.secret)
-									.then((valid) => {
-										if (valid) {
-
-											if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
-												// The scope requested doesn't exist as a role against the user,
-												// you shall not pass.
-												throw new error.AuthError('Invalid scope: ' + data.scope);
-											}
-
-											// Create a moment of the expiry expression
-											let expiry = helpers.parseDatePeriod(data.expiry);
-											if (expiry === null) {
-												throw new error.AuthError('Invalid expiry time: ' + data.expiry);
-											}
+								return auth.verifyPassword(data.secret).then((valid) => {
+									if (valid) {
+										if (data.scope !== "user" && _.indexOf(user.roles, data.scope) === -1) {
+											// The scope requested doesn't exist as a role against the user,
+											// you shall not pass.
+											throw new errs.AuthError(`Invalid scope: ${data.scope}`);
+										}
 
-											return Token.create({
-												iss:   issuer || 'api',
-												attrs: {
-													id: user.id
-												},
-												scope:     [data.scope],
-												expiresIn: data.expiry
-											})
-												.then((signed) => {
-													return {
-														token:   signed.token,
-														expires: expiry.toISOString()
-													};
-												});
-										} else {
-											throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+										// Create a moment of the expiry expression
+										const expiry = parseDatePeriod(data.expiry);
+										if (expiry === null) {
+											throw new errs.AuthError(`Invalid expiry time: ${data.expiry}`);
 										}
-									});
-							} else {
-								throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
+
+										return Token.create({
+											iss: issuer || "api",
+											attrs: {
+												id: user.id,
+											},
+											scope: [data.scope],
+											expiresIn: data.expiry,
+										}).then((signed) => {
+											return {
+												token: signed.token,
+												expires: expiry.toISOString(),
+											};
+										});
+									}
+									throw new errs.AuthError(
+										ERROR_MESSAGE_INVALID_AUTH,
+										ERROR_MESSAGE_INVALID_AUTH_I18N,
+									);
+								});
 							}
+							throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
 						});
-				} else {
-					throw new error.AuthError(ERROR_MESSAGE_INVALID_AUTH);
 				}
+				throw new errs.AuthError(ERROR_MESSAGE_INVALID_AUTH);
 			});
 	},
 
@@ -92,48 +89,45 @@ module.exports = {
 	 * @returns {Promise}
 	 */
 	getFreshToken: (access, data) => {
-		let Token = new TokenModel();
-
-		data        = data || {};
-		data.expiry = data.expiry || '1d';
+		const Token = TokenModel();
+		const thisData = data || {};
 
-		if (access && access.token.getUserId(0)) {
+		thisData.expiry = thisData.expiry || "1d";
 
+		if (access?.token.getUserId(0)) {
 			// Create a moment of the expiry expression
-			let expiry = helpers.parseDatePeriod(data.expiry);
+			const expiry = parseDatePeriod(thisData.expiry);
 			if (expiry === null) {
-				throw new error.AuthError('Invalid expiry time: ' + data.expiry);
+				throw new errs.AuthError(`Invalid expiry time: ${thisData.expiry}`);
 			}
 
-			let token_attrs = {
-				id: access.token.getUserId(0)
+			const token_attrs = {
+				id: access.token.getUserId(0),
 			};
 
 			// Only admins can request otherwise scoped tokens
-			let scope = access.token.get('scope');
-			if (data.scope && access.token.hasScope('admin')) {
-				scope = [data.scope];
+			let scope = access.token.get("scope");
+			if (thisData.scope && access.token.hasScope("admin")) {
+				scope = [thisData.scope];
 
-				if (data.scope === 'job-board' || data.scope === 'worker') {
+				if (thisData.scope === "job-board" || thisData.scope === "worker") {
 					token_attrs.id = 0;
 				}
 			}
 
 			return Token.create({
-				iss:       'api',
-				scope:     scope,
-				attrs:     token_attrs,
-				expiresIn: data.expiry
-			})
-				.then((signed) => {
-					return {
-						token:   signed.token,
-						expires: expiry.toISOString()
-					};
-				});
-		} else {
-			throw new error.AssertionFailedError('Existing token contained invalid user data');
+				iss: "api",
+				scope: scope,
+				attrs: token_attrs,
+				expiresIn: thisData.expiry,
+			}).then((signed) => {
+				return {
+					token: signed.token,
+					expires: expiry.toISOString(),
+				};
+			});
 		}
+		throw new errs.AssertionFailedError("Existing token contained invalid user data");
 	},
 
 	/**
@@ -141,24 +135,23 @@ module.exports = {
 	 * @returns {Promise}
 	 */
 	getTokenFromUser: (user) => {
-		const expire = '1d';
-		const Token  = new TokenModel();
-		const expiry = helpers.parseDatePeriod(expire);
+		const expire = "1d";
+		const Token = TokenModel();
+		const expiry = parseDatePeriod(expire);
 
 		return Token.create({
-			iss:   'api',
+			iss: "api",
 			attrs: {
-				id: user.id
+				id: user.id,
 			},
-			scope:     ['user'],
-			expiresIn: expire
-		})
-			.then((signed) => {
-				return {
-					token:   signed.token,
-					expires: expiry.toISOString(),
-					user:    user
-				};
-			});
-	}
+			scope: ["user"],
+			expiresIn: expire,
+		}).then((signed) => {
+			return {
+				token: signed.token,
+				expires: expiry.toISOString(),
+				user: user,
+			};
+		});
+	},
 };

+ 197 - 210
backend/internal/user.js

@@ -1,43 +1,40 @@
-const _                   = require('lodash');
-const error               = require('../lib/error');
-const utils               = require('../lib/utils');
-const userModel           = require('../models/user');
-const userPermissionModel = require('../models/user_permission');
-const authModel           = require('../models/auth');
-const gravatar            = require('gravatar');
-const internalToken       = require('./token');
-const internalAuditLog    = require('./audit-log');
-
-function omissions () {
-	return ['is_deleted'];
+import gravatar from "gravatar";
+import _ from "lodash";
+import errs from "../lib/error.js";
+import utils from "../lib/utils.js";
+import authModel from "../models/auth.js";
+import userModel from "../models/user.js";
+import userPermissionModel from "../models/user_permission.js";
+import internalAuditLog from "./audit-log.js";
+import internalToken from "./token.js";
+
+const omissions = () => {
+	return ["is_deleted"];
 }
 
 const internalUser = {
-
 	/**
 	 * @param   {Access}  access
 	 * @param   {Object}  data
 	 * @returns {Promise}
 	 */
 	create: (access, data) => {
-		let auth = data.auth || null;
+		const auth = data.auth || null;
 		delete data.auth;
 
-		data.avatar = data.avatar || '';
-		data.roles  = data.roles || [];
+		data.avatar = data.avatar || "";
+		data.roles = data.roles || [];
 
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 
-		return access.can('users:create', data)
+		return access
+			.can("users:create", data)
 			.then(() => {
-				data.avatar = gravatar.url(data.email, {default: 'mm'});
+				data.avatar = gravatar.url(data.email, { default: "mm" });
 
-				return userModel
-					.query()
-					.insertAndFetch(data)
-					.then(utils.omitRow(omissions()));
+				return userModel.query().insertAndFetch(data).then(utils.omitRow(omissions()));
 			})
 			.then((user) => {
 				if (auth) {
@@ -45,45 +42,45 @@ const internalUser = {
 						.query()
 						.insert({
 							user_id: user.id,
-							type:    auth.type,
-							secret:  auth.secret,
-							meta:    {}
+							type: auth.type,
+							secret: auth.secret,
+							meta: {},
 						})
 						.then(() => {
 							return user;
 						});
-				} else {
-					return user;
 				}
+				return user;
 			})
 			.then((user) => {
 				// Create permissions row as well
-				let is_admin = data.roles.indexOf('admin') !== -1;
+				const is_admin = data.roles.indexOf("admin") !== -1;
 
 				return userPermissionModel
 					.query()
 					.insert({
-						user_id:           user.id,
-						visibility:        is_admin ? 'all' : 'user',
-						proxy_hosts:       'manage',
-						redirection_hosts: 'manage',
-						dead_hosts:        'manage',
-						streams:           'manage',
-						access_lists:      'manage',
-						certificates:      'manage'
+						user_id: user.id,
+						visibility: is_admin ? "all" : "user",
+						proxy_hosts: "manage",
+						redirection_hosts: "manage",
+						dead_hosts: "manage",
+						streams: "manage",
+						access_lists: "manage",
+						certificates: "manage",
 					})
 					.then(() => {
-						return internalUser.get(access, {id: user.id, expand: ['permissions']});
+						return internalUser.get(access, { id: user.id, expand: ["permissions"] });
 					});
 			})
 			.then((user) => {
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'created',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        user
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "created",
+						object_type: "user",
+						object_id: user.id,
+						meta: user,
+					})
 					.then(() => {
 						return user;
 					});
@@ -99,62 +96,58 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	update: (access, data) => {
-		if (typeof data.is_disabled !== 'undefined') {
+		if (typeof data.is_disabled !== "undefined") {
 			data.is_disabled = data.is_disabled ? 1 : 0;
 		}
 
-		return access.can('users:update', data.id)
+		return access
+			.can("users:update", data.id)
 			.then(() => {
-
 				// Make sure that the user being updated doesn't change their email to another user that is already using it
 				// 1. get user we want to update
-				return internalUser.get(access, {id: data.id})
-					.then((user) => {
-
-						// 2. if email is to be changed, find other users with that email
-						if (typeof data.email !== 'undefined') {
-							data.email = data.email.toLowerCase().trim();
-
-							if (user.email !== data.email) {
-								return internalUser.isEmailAvailable(data.email, data.id)
-									.then((available) => {
-										if (!available) {
-											throw new error.ValidationError('Email address already in use - ' + data.email);
-										}
-
-										return user;
-									});
-							}
+				return internalUser.get(access, { id: data.id }).then((user) => {
+					// 2. if email is to be changed, find other users with that email
+					if (typeof data.email !== "undefined") {
+						data.email = data.email.toLowerCase().trim();
+
+						if (user.email !== data.email) {
+							return internalUser.isEmailAvailable(data.email, data.id).then((available) => {
+								if (!available) {
+									throw new errs.ValidationError(`Email address already in use - ${data.email}`);
+								}
+								return user;
+							});
 						}
+					}
 
-						// No change to email:
-						return user;
-					});
+					// No change to email:
+					return user;
+				});
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
-				data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
+				data.avatar = gravatar.url(data.email || user.email, { default: "mm" });
 
-				return userModel
-					.query()
-					.patchAndFetchById(user.id, data)
-					.then(utils.omitRow(omissions()));
+				return userModel.query().patchAndFetchById(user.id, data).then(utils.omitRow(omissions()));
 			})
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				// Add to audit log
-				return internalAuditLog.add(access, {
-					action:      'updated',
-					object_type: 'user',
-					object_id:   user.id,
-					meta:        data
-				})
+				return internalAuditLog
+					.add(access, {
+						action: "updated",
+						object_type: "user",
+						object_id: user.id,
+						meta: data,
+					})
 					.then(() => {
 						return user;
 					});
@@ -170,36 +163,35 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	get: (access, data) => {
-		if (typeof data === 'undefined') {
-			data = {};
-		}
+		const thisData = data || {};
 
-		if (typeof data.id === 'undefined' || !data.id) {
-			data.id = access.token.getUserId(0);
+		if (typeof thisData.id === "undefined" || !thisData.id) {
+			thisData.id = access.token.getUserId(0);
 		}
 
-		return access.can('users:get', data.id)
+		return access
+			.can("users:get", thisData.id)
 			.then(() => {
-				let query = userModel
+				const query = userModel
 					.query()
-					.where('is_deleted', 0)
-					.andWhere('id', data.id)
-					.allowGraph('[permissions]')
+					.where("is_deleted", 0)
+					.andWhere("id", thisData.id)
+					.allowGraph("[permissions]")
 					.first();
 
-				if (typeof data.expand !== 'undefined' && data.expand !== null) {
-					query.withGraphFetched('[' + data.expand.join(', ') + ']');
+				if (typeof thisData.expand !== "undefined" && thisData.expand !== null) {
+					query.withGraphFetched(`[${thisData.expand.join(", ")}]`);
 				}
 
 				return query.then(utils.omitRow(omissions()));
 			})
 			.then((row) => {
 				if (!row || !row.id) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(thisData.id);
 				}
 				// Custom omissions
-				if (typeof data.omit !== 'undefined' && data.omit !== null) {
-					row = _.omit(row, data.omit);
+				if (typeof thisData.omit !== "undefined" && thisData.omit !== null) {
+					return _.omit(row, thisData.omit);
 				}
 				return row;
 			});
@@ -213,20 +205,15 @@ const internalUser = {
 	 * @param user_id
 	 */
 	isEmailAvailable: (email, user_id) => {
-		let query = userModel
-			.query()
-			.where('email', '=', email.toLowerCase().trim())
-			.where('is_deleted', 0)
-			.first();
-
-		if (typeof user_id !== 'undefined') {
-			query.where('id', '!=', user_id);
+		const query = userModel.query().where("email", "=", email.toLowerCase().trim()).where("is_deleted", 0).first();
+
+		if (typeof user_id !== "undefined") {
+			query.where("id", "!=", user_id);
 		}
 
-		return query
-			.then((user) => {
-				return !user;
-			});
+		return query.then((user) => {
+			return !user;
+		});
 	},
 
 	/**
@@ -237,33 +224,34 @@ const internalUser = {
 	 * @returns {Promise}
 	 */
 	delete: (access, data) => {
-		return access.can('users:delete', data.id)
+		return access
+			.can("users:delete", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (!user) {
-					throw new error.ItemNotFoundError(data.id);
+					throw new errs.ItemNotFoundError(data.id);
 				}
 
 				// Make sure user can't delete themselves
 				if (user.id === access.token.getUserId(0)) {
-					throw new error.PermissionError('You cannot delete yourself.');
+					throw new errs.PermissionError("You cannot delete yourself.");
 				}
 
 				return userModel
 					.query()
-					.where('id', user.id)
+					.where("id", user.id)
 					.patch({
-						is_deleted: 1
+						is_deleted: 1,
 					})
 					.then(() => {
 						// Add to audit log
 						return internalAuditLog.add(access, {
-							action:      'deleted',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        _.omit(user, omissions())
+							action: "deleted",
+							object_type: "user",
+							object_id: user.id,
+							meta: _.omit(user, omissions()),
 						});
 					});
 			})
@@ -280,26 +268,26 @@ const internalUser = {
 	 * @returns {*}
 	 */
 	getCount: (access, search_query) => {
-		return access.can('users:list')
+		return access
+			.can("users:list")
 			.then(() => {
-				let query = userModel
-					.query()
-					.count('id as count')
-					.where('is_deleted', 0)
-					.first();
+				const query = userModel.query().count("id as count").where("is_deleted", 0).first();
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === "string") {
 					query.where(function () {
-						this.where('user.name', 'like', '%' + search_query + '%')
-							.orWhere('user.email', 'like', '%' + search_query + '%');
+						this.where("user.name", "like", `%${search_query}%`).orWhere(
+							"user.email",
+							"like",
+							`%${search_query}%`,
+						);
 					});
 				}
 
 				return query;
 			})
 			.then((row) => {
-				return parseInt(row.count, 10);
+				return Number.parseInt(row.count, 10);
 			});
 	},
 
@@ -312,29 +300,31 @@ const internalUser = {
 	 * @returns {Promise}
 	 */
 	getAll: (access, expand, search_query) => {
-		return access.can('users:list')
-			.then(() => {
-				let query = userModel
-					.query()
-					.where('is_deleted', 0)
-					.groupBy('id')
-					.allowGraph('[permissions]')
-					.orderBy('name', 'ASC');
-
-				// Query is used for searching
-				if (typeof search_query === 'string') {
-					query.where(function () {
-						this.where('name', 'like', '%' + search_query + '%')
-							.orWhere('email', 'like', '%' + search_query + '%');
-					});
-				}
-
-				if (typeof expand !== 'undefined' && expand !== null) {
-					query.withGraphFetched('[' + expand.join(', ') + ']');
-				}
-
-				return query.then(utils.omitRows(omissions()));
-			});
+		return access.can("users:list").then(() => {
+			const query = userModel
+				.query()
+				.where("is_deleted", 0)
+				.groupBy("id")
+				.allowGraph("[permissions]")
+				.orderBy("name", "ASC");
+
+			// Query is used for searching
+			if (typeof search_query === "string") {
+				query.where(function () {
+					this.where("name", "like", `%${search_query}%`).orWhere(
+						"email",
+						"like",
+						`%${search_query}%`,
+					);
+				});
+			}
+
+			if (typeof expand !== "undefined" && expand !== null) {
+				query.withGraphFetched(`[${expand.join(", ")}]`);
+			}
+
+			return query.then(utils.omitRows(omissions()));
+		});
 	},
 
 	/**
@@ -345,8 +335,8 @@ const internalUser = {
 	getUserOmisionsByAccess: (access, id_requested) => {
 		let response = []; // Admin response
 
-		if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
-			response = ['roles', 'is_deleted']; // Restricted response
+		if (!access.token.hasScope("admin") && access.token.getUserId(0) !== id_requested) {
+			response = ["roles", "is_deleted"]; // Restricted response
 		}
 
 		return response;
@@ -361,26 +351,30 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	setPassword: (access, data) => {
-		return access.can('users:password', data.id)
+		return access
+			.can("users:password", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
 				if (user.id === access.token.getUserId(0)) {
 					// they're setting their own password. Make sure their current password is correct
-					if (typeof data.current === 'undefined' || !data.current) {
-						throw new error.ValidationError('Current password was not supplied');
+					if (typeof data.current === "undefined" || !data.current) {
+						throw new errs.ValidationError("Current password was not supplied");
 					}
 
-					return internalToken.getTokenFromEmail({
-						identity: user.email,
-						secret:   data.current
-					})
+					return internalToken
+						.getTokenFromEmail({
+							identity: user.email,
+							secret: data.current,
+						})
 						.then(() => {
 							return user;
 						});
@@ -392,43 +386,36 @@ const internalUser = {
 				// Get auth, patch if it exists
 				return authModel
 					.query()
-					.where('user_id', user.id)
-					.andWhere('type', data.type)
+					.where("user_id", user.id)
+					.andWhere("type", data.type)
 					.first()
 					.then((existing_auth) => {
 						if (existing_auth) {
 							// patch
-							return authModel
-								.query()
-								.where('user_id', user.id)
-								.andWhere('type', data.type)
-								.patch({
-									type:   data.type, // This is required for the model to encrypt on save
-									secret: data.secret
-								});
-						} else {
-							// insert
-							return authModel
-								.query()
-								.insert({
-									user_id: user.id,
-									type:    data.type,
-									secret:  data.secret,
-									meta:    {}
-								});
+							return authModel.query().where("user_id", user.id).andWhere("type", data.type).patch({
+								type: data.type, // This is required for the model to encrypt on save
+								secret: data.secret,
+							});
 						}
+						// insert
+						return authModel.query().insert({
+							user_id: user.id,
+							type: data.type,
+							secret: data.secret,
+							meta: {},
+						});
 					})
 					.then(() => {
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:             user.name,
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
 								password_changed: true,
-								auth_type:        data.type
-							}
+								auth_type: data.type,
+							},
 						});
 					});
 			})
@@ -443,14 +430,17 @@ const internalUser = {
 	 * @return {Promise}
 	 */
 	setPermissions: (access, data) => {
-		return access.can('users:permissions', data.id)
+		return access
+			.can("users:permissions", data.id)
 			.then(() => {
-				return internalUser.get(access, {id: data.id});
+				return internalUser.get(access, { id: data.id });
 			})
 			.then((user) => {
 				if (user.id !== data.id) {
 					// Sanity check that something crazy hasn't happened
-					throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
+					throw new errs.InternalValidationError(
+						`User could not be updated, IDs do not match: ${user.id} !== ${data.id}`,
+					);
 				}
 
 				return user;
@@ -459,34 +449,30 @@ const internalUser = {
 				// Get perms row, patch if it exists
 				return userPermissionModel
 					.query()
-					.where('user_id', user.id)
+					.where("user_id", user.id)
 					.first()
 					.then((existing_auth) => {
 						if (existing_auth) {
 							// patch
 							return userPermissionModel
 								.query()
-								.where('user_id', user.id)
-								.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
-						} else {
-							// insert
-							return userPermissionModel
-								.query()
-								.insertAndFetch(_.assign({user_id: user.id}, data));
+								.where("user_id", user.id)
+								.patchAndFetchById(existing_auth.id, _.assign({ user_id: user.id }, data));
 						}
+						// insert
+						return userPermissionModel.query().insertAndFetch(_.assign({ user_id: user.id }, data));
 					})
 					.then((permissions) => {
 						// Add to Audit Log
 						return internalAuditLog.add(access, {
-							action:      'updated',
-							object_type: 'user',
-							object_id:   user.id,
-							meta:        {
-								name:        user.name,
-								permissions: permissions
-							}
+							action: "updated",
+							object_type: "user",
+							object_id: user.id,
+							meta: {
+								name: user.name,
+								permissions: permissions,
+							},
 						});
-
 					});
 			})
 			.then(() => {
@@ -500,14 +486,15 @@ const internalUser = {
 	 * @param {Integer}  data.id
 	 */
 	loginAs: (access, data) => {
-		return access.can('users:loginas', data.id)
+		return access
+			.can("users:loginas", data.id)
 			.then(() => {
 				return internalUser.get(access, data);
 			})
 			.then((user) => {
 				return internalToken.getTokenFromUser(user);
 			});
-	}
+	},
 };
 
-module.exports = internalUser;
+export default internalUser;
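
With `internalUser` now a default export, consumers import the module directly instead of calling `require`. A minimal sketch of a route file using it (the router path, handler and expansion are illustrative, not part of this diff):

```js
import express from "express";
import internalUser from "../internal/user.js";

const router = express.Router();

// Fetch a single user; `res.locals.access` is assumed to be set by the jwt-decode middleware
router.get("/:user_id", (req, res, next) => {
	internalUser
		.get(res.locals.access, { id: req.params.user_id, expand: ["permissions"] })
		.then((user) => res.status(200).send(user))
		.catch(next);
});

export default router;
```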

+ 164 - 167
backend/lib/access.js

@@ -4,28 +4,32 @@
  * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
  * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
  * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
- *
- *
  */
 
-const _              = require('lodash');
-const logger         = require('../logger').access;
-const Ajv            = require('ajv/dist/2020');
-const error          = require('./error');
-const userModel      = require('../models/user');
-const proxyHostModel = require('../models/proxy_host');
-const TokenModel     = require('../models/token');
-const roleSchema     = require('./access/roles.json');
-const permsSchema    = require('./access/permissions.json');
+import fs from "node:fs";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import { access as logger } from "../logger.js";
+import proxyHostModel from "../models/proxy_host.js";
+import TokenModel from "../models/token.js";
+import userModel from "../models/user.js";
+import permsSchema from "./access/permissions.json" with { type: "json" };
+import roleSchema from "./access/roles.json" with { type: "json" };
+import errs from "./error.js";
 
-module.exports = function (token_string) {
-	let Token                 = new TokenModel();
-	let token_data            = null;
-	let initialised           = false;
-	let object_cache          = {};
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+export default function (token_string) {
+	const Token = TokenModel();
+	let token_data = null;
+	let initialised = false;
+	const object_cache = {};
 	let allow_internal_access = false;
-	let user_roles            = [];
-	let permissions           = {};
+	let user_roles = [];
+	let permissions = {};
 
 	/**
 	 * Loads the Token object from the token string
@@ -37,10 +41,10 @@ module.exports = function (token_string) {
 			if (initialised) {
 				resolve();
 			} else if (!token_string) {
-				reject(new error.PermissionError('Permission Denied'));
+				reject(new errs.PermissionError("Permission Denied"));
 			} else {
-				resolve(Token.load(token_string)
-					.then((data) => {
+				resolve(
+					Token.load(token_string).then((data) => {
 						token_data = data;
 
 						// At this point we need to load the user from the DB and make sure they:
@@ -48,21 +52,25 @@ module.exports = function (token_string) {
 						// - still have the appropriate scopes for this token
 						// This is only required when the User ID is supplied or if the token scope has `user`
 
-						if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
+						if (
+							token_data.attrs.id ||
+							(typeof token_data.scope !== "undefined" &&
+								_.indexOf(token_data.scope, "user") !== -1)
+						) {
 							// Has token user id or token user scope
 							return userModel
 								.query()
-								.where('id', token_data.attrs.id)
-								.andWhere('is_deleted', 0)
-								.andWhere('is_disabled', 0)
-								.allowGraph('[permissions]')
-								.withGraphFetched('[permissions]')
+								.where("id", token_data.attrs.id)
+								.andWhere("is_deleted", 0)
+								.andWhere("is_disabled", 0)
+								.allowGraph("[permissions]")
+								.withGraphFetched("[permissions]")
 								.first()
 								.then((user) => {
 									if (user) {
 										// make sure user has all scopes of the token
 										// The `user` role is not added against the user row, so we have to just add it here to get past this check.
-										user.roles.push('user');
+										user.roles.push("user");
 
 										let is_ok = true;
 										_.forEach(token_data.scope, (scope_item) => {
@@ -72,21 +80,19 @@ module.exports = function (token_string) {
 										});
 
 										if (!is_ok) {
-											throw new error.AuthError('Invalid token scope for User');
-										} else {
-											initialised = true;
-											user_roles  = user.roles;
-											permissions = user.permissions;
+											throw new errs.AuthError("Invalid token scope for User");
 										}
-
+										initialised = true;
+										user_roles = user.roles;
+										permissions = user.permissions;
 									} else {
-										throw new error.AuthError('User cannot be loaded for Token');
+										throw new errs.AuthError("User cannot be loaded for Token");
 									}
 								});
-						} else {
-							initialised = true;
 						}
-					}));
+						initialised = true;
+					}),
+				);
 			}
 		});
 	};
@@ -101,53 +107,55 @@ module.exports = function (token_string) {
 	 */
 	this.loadObjects = (object_type) => {
 		return new Promise((resolve, reject) => {
-			if (Token.hasScope('user')) {
-				if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
-					reject(new error.AuthError('User Token supplied without a User ID'));
+			if (Token.hasScope("user")) {
+				if (
+					typeof token_data.attrs.id === "undefined" ||
+					!token_data.attrs.id
+				) {
+					reject(new errs.AuthError("User Token supplied without a User ID"));
 				} else {
-					let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
+					const token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
 					let query;
 
-					if (typeof object_cache[object_type] === 'undefined') {
+					if (typeof object_cache[object_type] === "undefined") {
 						switch (object_type) {
-
-						// USERS - should only return yourself
-						case 'users':
-							resolve(token_user_id ? [token_user_id] : []);
-							break;
+							// USERS - should only return yourself
+							case "users":
+								resolve(token_user_id ? [token_user_id] : []);
+								break;
 
 							// Proxy Hosts
-						case 'proxy_hosts':
-							query = proxyHostModel
-								.query()
-								.select('id')
-								.andWhere('is_deleted', 0);
+							case "proxy_hosts":
+								query = proxyHostModel
+									.query()
+									.select("id")
+									.andWhere("is_deleted", 0);
 
-							if (permissions.visibility === 'user') {
-								query.andWhere('owner_user_id', token_user_id);
-							}
+								if (permissions.visibility === "user") {
+									query.andWhere("owner_user_id", token_user_id);
+								}
 
-							resolve(query
-								.then((rows) => {
-									let result = [];
-									_.forEach(rows, (rule_row) => {
-										result.push(rule_row.id);
-									});
+								resolve(
+									query.then((rows) => {
+										const result = [];
+										_.forEach(rows, (rule_row) => {
+											result.push(rule_row.id);
+										});
 
-									// enum should not have less than 1 item
-									if (!result.length) {
-										result.push(0);
-									}
+										// enum should not have less than 1 item
+										if (!result.length) {
+											result.push(0);
+										}
 
-									return result;
-								})
-							);
-							break;
+										return result;
+									}),
+								);
+								break;
 
 							// DEFAULT: null
-						default:
-							resolve(null);
-							break;
+							default:
+								resolve(null);
+								break;
 						}
 					} else {
 						resolve(object_cache[object_type]);
@@ -156,11 +164,10 @@ module.exports = function (token_string) {
 			} else {
 				resolve(null);
 			}
-		})
-			.then((objects) => {
-				object_cache[object_type] = objects;
-				return objects;
-			});
+		}).then((objects) => {
+			object_cache[object_type] = objects;
+			return objects;
+		});
 	};
 
 	/**
@@ -170,50 +177,48 @@ module.exports = function (token_string) {
 	 * @returns {Object}
 	 */
 	this.getObjectSchema = (permission_label) => {
-		let base_object_type = permission_label.split(':').shift();
+		const base_object_type = permission_label.split(":").shift();
 
-		let schema = {
-			$id:                  'objects',
-			description:          'Actor Properties',
-			type:                 'object',
+		const schema = {
+			$id: "objects",
+			description: "Actor Properties",
+			type: "object",
 			additionalProperties: false,
-			properties:           {
+			properties: {
 				user_id: {
 					anyOf: [
 						{
-							type: 'number',
-							enum: [Token.get('attrs').id]
-						}
-					]
+							type: "number",
+							enum: [Token.get("attrs").id],
+						},
+					],
 				},
 				scope: {
-					type:    'string',
-					pattern: '^' + Token.get('scope') + '$'
-				}
-			}
+					type: "string",
+					pattern: `^${Token.get("scope")}$`,
+				},
+			},
 		};
 
-		return this.loadObjects(base_object_type)
-			.then((object_result) => {
-				if (typeof object_result === 'object' && object_result !== null) {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						enum:    object_result,
-						minimum: 1
-					};
-				} else {
-					schema.properties[base_object_type] = {
-						type:    'number',
-						minimum: 1
-					};
-				}
+		return this.loadObjects(base_object_type).then((object_result) => {
+			if (typeof object_result === "object" && object_result !== null) {
+				schema.properties[base_object_type] = {
+					type: "number",
+					enum: object_result,
+					minimum: 1,
+				};
+			} else {
+				schema.properties[base_object_type] = {
+					type: "number",
+					minimum: 1,
+				};
+			}
 
-				return schema;
-			});
+			return schema;
+		});
 	};
 
 	return {
-
 		token: Token,
 
 		/**
@@ -222,7 +227,7 @@ module.exports = function (token_string) {
 		 * @returns {Promise}
 		 */
 		load: (allow_internal) => {
-			return new Promise(function (resolve/*, reject*/) {
+			return new Promise((resolve /*, reject*/) => {
 				if (token_string) {
 					resolve(Token.load(token_string));
 				} else {
@@ -240,68 +245,60 @@ module.exports = function (token_string) {
 		 * @param {*}       [data]
 		 * @returns {Promise}
 		 */
-		can: (permission, data) => {
+		can: async (permission, data) => {
 			if (allow_internal_access === true) {
-				return Promise.resolve(true);
-				//return true;
-			} else {
-				return this.init()
-					.then(() => {
-						// Initialised, token decoded ok
-						return this.getObjectSchema(permission)
-							.then((objectSchema) => {
-								const data_schema = {
-									[permission]: {
-										data:                         data,
-										scope:                        Token.get('scope'),
-										roles:                        user_roles,
-										permission_visibility:        permissions.visibility,
-										permission_proxy_hosts:       permissions.proxy_hosts,
-										permission_redirection_hosts: permissions.redirection_hosts,
-										permission_dead_hosts:        permissions.dead_hosts,
-										permission_streams:           permissions.streams,
-										permission_access_lists:      permissions.access_lists,
-										permission_certificates:      permissions.certificates
-									}
-								};
+				return true;
+			}
 
-								let permissionSchema = {
-									$async:               true,
-									$id:                  'permissions',
-									type:                 'object',
-									additionalProperties: false,
-									properties:           {}
-								};
+			try {
+				await this.init();
+				const objectSchema = await this.getObjectSchema(permission);
 
-								permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
+				const dataSchema = {
+					[permission]: {
+						data: data,
+						scope: Token.get("scope"),
+						roles: user_roles,
+						permission_visibility: permissions.visibility,
+						permission_proxy_hosts: permissions.proxy_hosts,
+						permission_redirection_hosts: permissions.redirection_hosts,
+						permission_dead_hosts: permissions.dead_hosts,
+						permission_streams: permissions.streams,
+						permission_access_lists: permissions.access_lists,
+						permission_certificates: permissions.certificates,
+					},
+				};
 
-								const ajv = new Ajv({
-									verbose:      true,
-									allErrors:    true,
-									breakOnError: true,
-									coerceTypes:  true,
-									schemas:      [
-										roleSchema,
-										permsSchema,
-										objectSchema,
-										permissionSchema
-									]
-								});
+				const permissionSchema = {
+					$async: true,
+					$id: "permissions",
+					type: "object",
+					additionalProperties: false,
+					properties: {},
+				};
+
+				const rawData = fs.readFileSync(
+					`${__dirname}/access/${permission.replace(/:/gim, "-")}.json`,
+					{ encoding: "utf8" },
+				);
+				permissionSchema.properties[permission] = JSON.parse(rawData);
 
-								return ajv.validate('permissions', data_schema)
-									.then(() => {
-										return data_schema[permission];
-									});
-							});
-					})
-					.catch((err) => {
-						err.permission      = permission;
-						err.permission_data = data;
-						logger.error(permission, data, err.message);
+				const ajv = new Ajv({
+					verbose: true,
+					allErrors: true,
+					breakOnError: true,
+					coerceTypes: true,
+					schemas: [roleSchema, permsSchema, objectSchema, permissionSchema],
+				});
 
-						throw new error.PermissionError('Permission Denied', err);
-					});
+				const valid = await ajv.validate("permissions", dataSchema);
+				return valid && dataSchema[permission];
+			} catch (err) {
+				err.permission = permission;
+				err.permission_data = data;
+				logger.error(permission, data, err.message);
+				throw new errs.PermissionError("Permission Denied", err);
 			}
-		}
+		},
 	};
-};
+}
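
Since `can()` is now `async` and the role/permission schemas are loaded via JSON import attributes, callers can await the permission check directly. A hedged sketch of the consumer side (the permission label and wrapper function are illustrative):

```js
import Access from "./lib/access.js";

const example = async (tokenString) => {
	const access = new Access(tokenString);
	await access.load();
	// Resolves with the validated permission payload, or throws errs.PermissionError
	return access.can("users:list");
};
```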

+ 76 - 74
backend/lib/certbot.js

@@ -1,85 +1,87 @@
-const dnsPlugins = require('../global/certbot-dns-plugins.json');
-const utils      = require('./utils');
-const error      = require('./error');
-const logger     = require('../logger').certbot;
-const batchflow  = require('batchflow');
+import batchflow from "batchflow";
+import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
+import { certbot as logger } from "../logger.js";
+import errs from "./error.js";
+import utils from "./utils.js";
 
-const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';
+const CERTBOT_VERSION_REPLACEMENT = "$(certbot --version | grep -Eo '[0-9](\\.[0-9]+)+')";
 
-const certbot = {
+/**
+ * @param {array} pluginKeys
+ */
+const installPlugins = async (pluginKeys) => {
+	let hasErrors = false;
 
-	/**
-	 * @param {array} pluginKeys
-	 */
-	installPlugins: async (pluginKeys) => {
-		let hasErrors = false;
-
-		return new Promise((resolve, reject) => {
-			if (pluginKeys.length === 0) {
-				resolve();
-				return;
-			}
+	return new Promise((resolve, reject) => {
+		if (pluginKeys.length === 0) {
+			resolve();
+			return;
+		}
 
-			batchflow(pluginKeys).sequential()
-				.each((_i, pluginKey, next) => {
-					certbot.installPlugin(pluginKey)
-						.then(() => {
-							next();
-						})
-						.catch((err) => {
-							hasErrors = true;
-							next(err);
-						});
-				})
-				.error((err) => {
-					logger.error(err.message);
-				})
-				.end(() => {
-					if (hasErrors) {
-						reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
-					} else {
-						resolve();
-					}
-				});
-		});
-	},
+		batchflow(pluginKeys)
+			.sequential()
+			.each((_i, pluginKey, next) => {
+				installPlugin(pluginKey)
+					.then(() => {
+						next();
+					})
+					.catch((err) => {
+						hasErrors = true;
+						next(err);
+					});
+			})
+			.error((err) => {
+				logger.error(err.message);
+			})
+			.end(() => {
+				if (hasErrors) {
+					reject(
+						new errs.CommandError("Some plugins failed to install. Please check the logs above", 1),
+					);
+				} else {
+					resolve();
+				}
+			});
+	});
+};
 
-	/**
-	 * Installs a cerbot plugin given the key for the object from
-	 * ../global/certbot-dns-plugins.json
-	 *
-	 * @param   {string}  pluginKey
-	 * @returns {Object}
-	 */
-	installPlugin: async (pluginKey) => {
-		if (typeof dnsPlugins[pluginKey] === 'undefined') {
-			// throw Error(`Certbot plugin ${pluginKey} not found`);
-			throw new error.ItemNotFoundError(pluginKey);
-		}
+/**
+ * Installs a certbot plugin given the key for the object from
+ * ../global/certbot-dns-plugins.json
+ *
+ * @param   {string}  pluginKey
+ * @returns {Object}
+ */
+const installPlugin = async (pluginKey) => {
+	if (typeof dnsPlugins[pluginKey] === "undefined") {
+		// throw Error(`Certbot plugin ${pluginKey} not found`);
+		throw new errs.ItemNotFoundError(pluginKey);
+	}
 
-		const plugin = dnsPlugins[pluginKey];
-		logger.start(`Installing ${pluginKey}...`);
+	const plugin = dnsPlugins[pluginKey];
+	logger.start(`Installing ${pluginKey}...`);
 
-		plugin.version      = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
-		plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
+	plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
+	plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
 
-		// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
-		// in new versions of Python
-		let env = Object.assign({}, process.env, {SETUPTOOLS_USE_DISTUTILS: 'stdlib'});
-		if (typeof plugin.env === 'object') {
-			env = Object.assign(env, plugin.env);
-		}
+	// SETUPTOOLS_USE_DISTUTILS is required for certbot plugins to install correctly
+	// in new versions of Python
+	let env = Object.assign({}, process.env, { SETUPTOOLS_USE_DISTUTILS: "stdlib" });
+	if (typeof plugin.env === "object") {
+		env = Object.assign(env, plugin.env);
+	}
 
-		const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
-		return utils.exec(cmd, {env})
-			.then((result) => {
-				logger.complete(`Installed ${pluginKey}`);
-				return result;
-			})
-			.catch((err) => {
-				throw err;
-			});
-	},
+	const cmd = `. /opt/certbot/bin/activate && pip install --no-cache-dir ${plugin.dependencies} ${plugin.package_name}${plugin.version}  && deactivate`;
+	return utils
+		.exec(cmd, { env })
+		.then((result) => {
+			logger.complete(`Installed ${pluginKey}`);
+			return result;
+		})
+		.catch((err) => {
+			throw err;
+		});
 };
 
-module.exports = certbot;
+export { installPlugins, installPlugin };
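
The module now exposes `installPlugins` and `installPlugin` as named exports instead of a `certbot` object. A short usage sketch (the plugin keys are examples only):

```js
import { installPlugins } from "./lib/certbot.js";

// Install the DNS plugins referenced by existing certificates, sequentially via batchflow
await installPlugins(["cloudflare", "route53"]);
```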

+ 144 - 144
backend/lib/config.js

@@ -1,6 +1,6 @@
-const fs      = require('fs');
-const NodeRSA = require('node-rsa');
-const logger  = require('../logger').global;
+import fs from "node:fs";
+import NodeRSA from "node-rsa";
+import { global as logger } from "../logger.js";
 
 const keysFile         = '/data/keys.json';
 const mysqlEngine      = 'mysql2';
@@ -12,18 +12,20 @@ let instance = null;
 // 1. Load from config file first (not recommended anymore)
 // 2. Use config env variables next
 const configure = () => {
-	const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
+	const filename = `${process.env.NODE_CONFIG_DIR || "./config"}/${process.env.NODE_ENV || "default"}.json`;
 	if (fs.existsSync(filename)) {
 		let configData;
 		try {
-			configData = require(filename);
+			// Load this json synchronously
+			const rawData = fs.readFileSync(filename);
+			configData = JSON.parse(rawData);
 		} catch (_) {
 			// do nothing
 		}
 
-		if (configData && configData.database) {
+		if (configData?.database) {
 			logger.info(`Using configuration from file: ${filename}`);
-			instance      = configData;
+			instance = configData;
 			instance.keys = getKeys();
 			return;
 		}
@@ -34,15 +36,15 @@ const configure = () => {
 	const envMysqlName = process.env.DB_MYSQL_NAME || null;
 	if (envMysqlHost && envMysqlUser && envMysqlName) {
 		// we have enough mysql creds to go with mysql
-		logger.info('Using MySQL configuration');
+		logger.info("Using MySQL configuration");
 		instance = {
 			database: {
-				engine:   mysqlEngine,
-				host:     envMysqlHost,
-				port:     process.env.DB_MYSQL_PORT || 3306,
-				user:     envMysqlUser,
+				engine: mysqlEngine,
+				host: envMysqlHost,
+				port: process.env.DB_MYSQL_PORT || 3306,
+				user: envMysqlUser,
 				password: process.env.DB_MYSQL_PASSWORD,
-				name:     envMysqlName,
+				name: envMysqlName,
 			},
 			keys: getKeys(),
 		};
@@ -54,33 +56,33 @@ const configure = () => {
 	const envPostgresName = process.env.DB_POSTGRES_NAME || null;
 	if (envPostgresHost && envPostgresUser && envPostgresName) {
 		// we have enough postgres creds to go with postgres
-		logger.info('Using Postgres configuration');
+		logger.info("Using Postgres configuration");
 		instance = {
 			database: {
-				engine:   postgresEngine,
-				host:     envPostgresHost,
-				port:     process.env.DB_POSTGRES_PORT || 5432,
-				user:     envPostgresUser,
+				engine: postgresEngine,
+				host: envPostgresHost,
+				port: process.env.DB_POSTGRES_PORT || 5432,
+				user: envPostgresUser,
 				password: process.env.DB_POSTGRES_PASSWORD,
-				name:     envPostgresName,
+				name: envPostgresName,
 			},
 			keys: getKeys(),
 		};
 		return;
 	}
 
-	const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
+	const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
 	logger.info(`Using Sqlite: ${envSqliteFile}`);
 	instance = {
 		database: {
-			engine: 'knex-native',
-			knex:   {
-				client:     sqliteClientName,
+			engine: "knex-native",
+			knex: {
+				client: sqliteClientName,
 				connection: {
-					filename: envSqliteFile
+					filename: envSqliteFile,
 				},
-				useNullAsDefault: true
-			}
+				useNullAsDefault: true,
+			},
 		},
 		keys: getKeys(),
 	};
@@ -88,150 +90,148 @@ const configure = () => {
 
 const getKeys = () => {
 	// Get keys from file
+	logger.debug("Checking for keys file:", keysFile);
 	if (!fs.existsSync(keysFile)) {
 		generateKeys();
 	} else if (process.env.DEBUG) {
-		logger.info('Keys file exists OK');
+		logger.info("Keys file exists OK");
 	}
 	try {
-		return require(keysFile);
+		// Load this json keysFile synchronously and return the json object
+		const rawData = fs.readFileSync(keysFile);
+		return JSON.parse(rawData);
 	} catch (err) {
-		logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
+		logger.error(`Could not read JWT key pair from config file: ${keysFile}`, err);
 		process.exit(1);
 	}
 };
 
 const generateKeys = () => {
-	logger.info('Creating a new JWT key pair...');
+	logger.info("Creating a new JWT key pair...");
 	// Now create the keys and save them in the config.
 	const key = new NodeRSA({ b: 2048 });
 	key.generateKeyPair();
 
 	const keys = {
-		key: key.exportKey('private').toString(),
-		pub: key.exportKey('public').toString(),
+		key: key.exportKey("private").toString(),
+		pub: key.exportKey("public").toString(),
 	};
 
 	// Write keys config
 	try {
 		fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
 	} catch (err) {
-		logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
+		logger.error(`Could not write JWT key pair to config file: ${keysFile}: ${err.message}`);
 		process.exit(1);
 	}
-	logger.info('Wrote JWT key pair to config file: ' + keysFile);
+	logger.info(`Wrote JWT key pair to config file: ${keysFile}`);
 };
 
-module.exports = {
-
-	/**
-	 *
-	 * @param   {string}  key   ie: 'database' or 'database.engine'
-	 * @returns {boolean}
-	 */
-	has: function(key) {
-		instance === null && configure();
-		const keys = key.split('.');
-		let level  = instance;
-		let has    = true;
-		keys.forEach((keyItem) =>{
-			if (typeof level[keyItem] === 'undefined') {
-				has = false;
-			} else {
-				level = level[keyItem];
-			}
-		});
-
-		return has;
-	},
-
-	/**
-	 * Gets a specific key from the top level
-	 *
-	 * @param {string} key
-	 * @returns {*}
-	 */
-	get: function (key) {
-		instance === null && configure();
-		if (key && typeof instance[key] !== 'undefined') {
-			return instance[key];
-		}
-		return instance;
-	},
-
-	/**
-	 * Is this a sqlite configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isSqlite: function () {
-		instance === null && configure();
-		return instance.database.knex && instance.database.knex.client === sqliteClientName;
-	},
-
-	/**
-	 * Is this a mysql configuration?
-	 *
-	 * @returns {boolean}
-	 */
-	isMysql: function () {
-		instance === null && configure();
-		return instance.database.engine === mysqlEngine;
-	},
-	
-	/**
-		 * Is this a postgres configuration?
-		 *
-		 * @returns {boolean}
-		 */
-	isPostgres: function () {
-		instance === null && configure();
-		return instance.database.engine === postgresEngine;
-	},
-
-	/**
-	 * Are we running in debug mdoe?
-	 *
-	 * @returns {boolean}
-	 */
-	debug: function () {
-		return !!process.env.DEBUG;
-	},
-
-	/**
-	 * Returns a public key
-	 *
-	 * @returns {string}
-	 */
-	getPublicKey: function () {
-		instance === null && configure();
-		return instance.keys.pub;
-	},
-
-	/**
-	 * Returns a private key
-	 *
-	 * @returns {string}
-	 */
-	getPrivateKey: function () {
-		instance === null && configure();
-		return instance.keys.key;
-	},
-
-	/**
-	 * @returns {boolean}
-	 */
-	useLetsencryptStaging: function () {
-		return !!process.env.LE_STAGING;
-	},
-
-	/**
-	 * @returns {string|null}
-	 */
-	useLetsencryptServer: function () {
-		if (process.env.LE_SERVER) {
-			return process.env.LE_SERVER;
+/**
+ *
+ * @param   {string}  key   ie: 'database' or 'database.engine'
+ * @returns {boolean}
+ */
+const configHas = (key) => {
+	instance === null && configure();
+	const keys = key.split(".");
+	let level = instance;
+	let has = true;
+	keys.forEach((keyItem) => {
+		if (typeof level[keyItem] === "undefined") {
+			has = false;
+		} else {
+			level = level[keyItem];
 		}
-		return null;
+	});
+
+	return has;
+};
+
+/**
+ * Gets a specific key from the top level
+ *
+ * @param {string} key
+ * @returns {*}
+ */
+const configGet = (key) => {
+	instance === null && configure();
+	if (key && typeof instance[key] !== "undefined") {
+		return instance[key];
 	}
+	return instance;
 };
+
+/**
+ * Is this a sqlite configuration?
+ *
+ * @returns {boolean}
+ */
+const isSqlite = () => {
+	instance === null && configure();
+	return instance.database.knex && instance.database.knex.client === sqliteClientName;
+};
+
+/**
+ * Is this a mysql configuration?
+ *
+ * @returns {boolean}
+ */
+const isMysql = () => {
+	instance === null && configure();
+	return instance.database.engine === mysqlEngine;
+};
+
+/**
+ * Is this a postgres configuration?
+ *
+ * @returns {boolean}
+ */
+const isPostgres = () => {
+	instance === null && configure();
+	return instance.database.engine === postgresEngine;
+};
+
+/**
+ * Are we running in debug mode?
+ *
+ * @returns {boolean}
+ */
+const isDebugMode = () => !!process.env.DEBUG;
+
+/**
+ * Returns a public key
+ *
+ * @returns {string}
+ */
+const getPublicKey = () => {
+	instance === null && configure();
+	return instance.keys.pub;
+};
+
+/**
+ * Returns a private key
+ *
+ * @returns {string}
+ */
+const getPrivateKey = () => {
+	instance === null && configure();
+	return instance.keys.key;
+};
+
+/**
+ * @returns {boolean}
+ */
+const useLetsencryptStaging = () => !!process.env.LE_STAGING;
+
+/**
+ * @returns {string|null}
+ */
+const useLetsencryptServer = () => {
+	if (process.env.LE_SERVER) {
+		return process.env.LE_SERVER;
+	}
+	return null;
+};
+
+export { configHas, configGet, isSqlite, isMysql, isPostgres, isDebugMode, getPrivateKey, getPublicKey, useLetsencryptStaging, useLetsencryptServer };
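
Call sites change from methods on a required `config` object to plain named imports. A minimal sketch of the new shape (paths and logging are illustrative):

```js
import { configGet, isSqlite } from "./lib/config.js";

if (isSqlite()) {
	// configGet("database") returns the `database` block built by configure()
	console.log("Sqlite file:", configGet("database").knex.connection.filename);
}
```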

+ 49 - 48
backend/lib/error.js

@@ -1,99 +1,100 @@
-const _    = require('lodash');
-const util = require('util');
+import _ from "lodash";
 
-module.exports = {
-
-	PermissionError: function (message, previous) {
+const errs = {
+	PermissionError: function (_, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = 'Permission Denied';
-		this.public   = true;
-		this.status   = 403;
+		this.message = "Permission Denied";
+		this.public = true;
+		this.status = 403;
 	},
 
 	ItemNotFoundError: function (id, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = 'Item Not Found - ' + id;
-		this.public   = true;
-		this.status   = 404;
+		this.message = `Item Not Found - ${id}`;
+		this.public = true;
+		this.status = 404;
 	},
 
-	AuthError: function (message, previous) {
+	AuthError: function (message, messageI18n, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 401;
+		this.message = message;
+		this.message_i18n = messageI18n;
+		this.public = true;
+		this.status = 400;
 	},
 
 	InternalError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 500;
-		this.public   = false;
+		this.message = message;
+		this.status = 500;
+		this.public = false;
 	},
 
 	InternalValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = false;
+		this.message = message;
+		this.status = 400;
+		this.public = false;
 	},
 
 	ConfigurationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.status   = 400;
-		this.public   = true;
+		this.message = message;
+		this.status = 400;
+		this.public = true;
 	},
 
 	CacheError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
-		this.message  = message;
+		this.name = this.constructor.name;
+		this.message = message;
 		this.previous = previous;
-		this.status   = 500;
-		this.public   = false;
+		this.status = 500;
+		this.public = false;
 	},
 
 	ValidationError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = true;
-		this.status   = 400;
+		this.message = message;
+		this.public = true;
+		this.status = 400;
 	},
 
 	AssertionFailedError: function (message, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = message;
-		this.public   = false;
-		this.status   = 400;
+		this.message = message;
+		this.public = false;
+		this.status = 400;
 	},
 
 	CommandError: function (stdErr, code, previous) {
 		Error.captureStackTrace(this, this.constructor);
-		this.name     = this.constructor.name;
+		this.name = this.constructor.name;
 		this.previous = previous;
-		this.message  = stdErr;
-		this.code     = code;
-		this.public   = false;
+		this.message = stdErr;
+		this.code = code;
+		this.public = false;
 	},
 };
 
-_.forEach(module.exports, function (error) {
-	util.inherits(error, Error);
+_.forEach(errs, (err) => {
+	err.prototype = Object.create(Error.prototype);
 });
+
+export default errs;
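
The error types remain constructor functions whose prototypes are rewired onto `Error`, so `new` and `instanceof` still behave as before. A quick illustrative check:

```js
import errs from "./lib/error.js";

const err = new errs.ValidationError("Domain name is required");
console.log(err instanceof Error); // true
console.log(err.status, err.public); // 400 true
```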

+ 8 - 7
backend/lib/express/cors.js

@@ -1,12 +1,13 @@
-module.exports = function (req, res, next) {
+export default (req, res, next) => {
 	if (req.headers.origin) {
 		res.set({
-			'Access-Control-Allow-Origin':      req.headers.origin,
-			'Access-Control-Allow-Credentials': true,
-			'Access-Control-Allow-Methods':     'OPTIONS, GET, POST',
-			'Access-Control-Allow-Headers':     'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
-			'Access-Control-Max-Age':           5 * 60,
-			'Access-Control-Expose-Headers':    'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
+			"Access-Control-Allow-Origin": req.headers.origin,
+			"Access-Control-Allow-Credentials": true,
+			"Access-Control-Allow-Methods": "OPTIONS, GET, POST",
+			"Access-Control-Allow-Headers":
+				"Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
+			"Access-Control-Max-Age": 5 * 60,
+			"Access-Control-Expose-Headers": "X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit",
 		});
 		next();
 	} else {

+ 6 - 6
backend/lib/express/jwt-decode.js

@@ -1,10 +1,11 @@
-const Access = require('../access');
+import Access from "../access.js";
 
-module.exports = () => {
-	return function (req, res, next) {
+export default () => {
+	return (_, res, next) => {
 		res.locals.access = null;
-		let access        = new Access(res.locals.token || null);
-		access.load()
+		const access = new Access(res.locals.token || null);
+		access
+			.load()
 			.then(() => {
 				res.locals.access = access;
 				next();
@@ -12,4 +13,3 @@ module.exports = () => {
 			.catch(next);
 	};
 };
-

+ 5 - 5
backend/lib/express/jwt.js

@@ -1,13 +1,13 @@
-module.exports = function () {
-	return function (req, res, next) {
+export default function () {
+	return (req, res, next) => {
 		if (req.headers.authorization) {
-			let parts = req.headers.authorization.split(' ');
+			const parts = req.headers.authorization.split(" ");
 
-			if (parts && parts[0] === 'Bearer' && parts[1]) {
+			if (parts && parts[0] === "Bearer" && parts[1]) {
 				res.locals.token = parts[1];
 			}
 		}
 
 		next();
 	};
-};
+}

+ 16 - 16
backend/lib/express/pagination.js

@@ -1,7 +1,6 @@
-let _ = require('lodash');
-
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
+import _ from "lodash";
 
+export default (default_sort, default_offset, default_limit, max_limit) => {
 	/**
 	 * This will setup the req query params with filtered data and defaults
 	 *
@@ -11,34 +10,35 @@ module.exports = function (default_sort, default_offset, default_limit, max_limi
 	 *
 	 */
 
-	return function (req, res, next) {
-
-		req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
-		req.query.limit  = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
+	return (req, _res, next) => {
+		req.query.offset =
+			typeof req.query.limit === "undefined" ? default_offset || 0 : Number.parseInt(req.query.offset, 10);
+		req.query.limit =
+			typeof req.query.limit === "undefined" ? default_limit || 50 : Number.parseInt(req.query.limit, 10);
 
 		if (max_limit && req.query.limit > max_limit) {
 			req.query.limit = max_limit;
 		}
 
 		// Sorting
-		let sort       = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
-		let myRegexp   = /.*\.(asc|desc)$/ig;
-		let sort_array = [];
+		let sort = typeof req.query.sort === "undefined" ? default_sort : req.query.sort;
+		const myRegexp = /.*\.(asc|desc)$/gi;
+		const sort_array = [];
 
-		sort = sort.split(',');
-		_.map(sort, function (val) {
-			let matches = myRegexp.exec(val);
+		sort = sort.split(",");
+		_.map(sort, (val) => {
+			const matches = myRegexp.exec(val);
 
 			if (matches !== null) {
-				let dir = matches[1];
+				const dir = matches[1];
 				sort_array.push({
 					field: val.substr(0, val.length - (dir.length + 1)),
-					dir:   dir.toLowerCase()
+					dir: dir.toLowerCase(),
 				});
 			} else {
 				sort_array.push({
 					field: val,
-					dir:   'asc'
+					dir: "asc",
 				});
 			}
 		});

+ 2 - 3
backend/lib/express/user-id-from-me.js

@@ -1,9 +1,8 @@
-module.exports = (req, res, next) => {
+export default (req, res, next) => {
 	if (req.params.user_id === 'me' && res.locals.access) {
 		req.params.user_id = res.locals.access.token.get('attrs').id;
 	} else {
-		req.params.user_id = parseInt(req.params.user_id, 10);
+		req.params.user_id = Number.parseInt(req.params.user_id, 10);
 	}
-
 	next();
 };

+ 52 - 56
backend/lib/helpers.js

@@ -1,62 +1,58 @@
-const moment       = require('moment');
-const {isPostgres} = require('./config');
-const {ref}        = require('objection');
+import moment from "moment";
+import { ref } from "objection";
+import { isPostgres } from "./config.js";
 
-module.exports = {
-
-	/**
-	 * Takes an expression such as 30d and returns a moment object of that date in future
-	 *
-	 * Key      Shorthand
-	 * ==================
-	 * years         y
-	 * quarters      Q
-	 * months        M
-	 * weeks         w
-	 * days          d
-	 * hours         h
-	 * minutes       m
-	 * seconds       s
-	 * milliseconds  ms
-	 *
-	 * @param {String}  expression
-	 * @returns {Object}
-	 */
-	parseDatePeriod: function (expression) {
-		let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
-		if (matches) {
-			return moment().add(matches[1], matches[2]);
-		}
+/**
+ * Takes an expression such as 30d and returns a moment object of that date in future
+ *
+ * Key      Shorthand
+ * ==================
+ * years         y
+ * quarters      Q
+ * months        M
+ * weeks         w
+ * days          d
+ * hours         h
+ * minutes       m
+ * seconds       s
+ * milliseconds  ms
+ *
+ * @param {String}  expression
+ * @returns {Object}
+ */
+const parseDatePeriod = (expression) => {
+	const matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
+	if (matches) {
+		return moment().add(matches[1], matches[2]);
+	}
 
-		return null;
-	},
+	return null;
+};
 
-	convertIntFieldsToBool: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] === 1;
-			}
-		});
-		return obj;
-	},
+const convertIntFieldsToBool = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] === 1;
+		}
+	});
+	return obj;
+};
 
-	convertBoolFieldsToInt: function (obj, fields) {
-		fields.forEach(function (field) {
-			if (typeof obj[field] !== 'undefined') {
-				obj[field] = obj[field] ? 1 : 0;
-			}
-		});
-		return obj;
-	},
+const convertBoolFieldsToInt = (obj, fields) => {
+	fields.forEach((field) => {
+		if (typeof obj[field] !== "undefined") {
+			obj[field] = obj[field] ? 1 : 0;
+		}
+	});
+	return obj;
+};
 
-	/**
-	 * Casts a column to json if using postgres
-	 *
-	 * @param {string} colName
-	 * @returns {string|Objection.ReferenceBuilder}
-	 */
-	castJsonIfNeed: function (colName) {
-		return isPostgres() ? ref(colName).castText() : colName;
-	}
+/**
+ * Casts a column to json if using postgres
+ *
+ * @param {string} colName
+ * @returns {string|Objection.ReferenceBuilder}
+ */
+const castJsonIfNeed = (colName) => (isPostgres() ? ref(colName).castText() : colName);
 
-};
+export { parseDatePeriod, convertIntFieldsToBool, convertBoolFieldsToInt, castJsonIfNeed };
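
The helpers are now individual named exports. Example usage (values are illustrative):

```js
import { convertBoolFieldsToInt, parseDatePeriod } from "./lib/helpers.js";

const expiresOn = parseDatePeriod("30d"); // moment object 30 days from now, or null if the expression is invalid
const row = convertBoolFieldsToInt({ enabled: true, meta: {} }, ["enabled"]); // { enabled: 1, meta: {} }
```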

+ 25 - 21
backend/lib/migrate_template.js

@@ -1,33 +1,34 @@
-const migrate_name = 'identifier_for_migrate';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "identifier_for_migrate";
 
 /**
  * Migrate
  *
  * @see http://knexjs.org/#Schema
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  */
-exports.up = function (knex, Promise) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	// Create Table example:
 
-	/*return knex.schema.createTable('notification', (table) => {
+	/*
+	return knex.schema.createTable('notification', (table) => {
 		 table.increments().primary();
 		 table.string('name').notNull();
 		 table.string('type').notNull();
 		 table.integer('created_on').notNull();
 		 table.integer('modified_on').notNull();
 	 })
-	 .then(function () {
-		logger.info('[' + migrate_name + '] Notification Table created');
-	 });*/
+		.then(function () {
+			logger.info('[' + migrateName + '] Notification Table created');
+		});
+	 */
 
-	logger.info('[' + migrate_name + '] Migrating Up Complete');
+	logger.info(`[${migrateName}] Migrating Up Complete`);
 
 	return Promise.resolve(true);
 };
@@ -35,21 +36,24 @@ exports.up = function (knex, Promise) {
 /**
  * Undo Migrate
  *
- * @param {Object} knex
- * @param {Promise} Promise
+ * @param   {Object} knex
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (_knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 	// Drop table example:
 
-	/*return knex.schema.dropTable('notification')
-	 .then(() => {
-		logger.info('[' + migrate_name + '] Notification Table dropped');
-	 });*/
+	/*
+	return knex.schema.dropTable('notification')
+		.then(() => {
+			logger.info(`[${migrateName}] Notification Table dropped`);
+		});
+	*/
 
-	logger.info('[' + migrate_name + '] Migrating Down Complete');
+	logger.info(`[${migrateName}] Migrating Down Complete`);
 
 	return Promise.resolve(true);
 };
+
+export { up, down };
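
A migration written against the updated template exports `up`/`down` as named ESM exports. A hedged sketch, mirroring the commented example above (table and columns are invented for illustration):

```js
import { migrate as logger } from "../logger.js";

const migrateName = "notification_table";

const up = (knex) => {
	logger.info(`[${migrateName}] Migrating Up...`);
	return knex.schema.createTable("notification", (table) => {
		table.increments().primary();
		table.string("name").notNull();
		table.integer("created_on").notNull();
		table.integer("modified_on").notNull();
	});
};

const down = (knex) => knex.schema.dropTable("notification");

export { up, down };
```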

+ 92 - 92
backend/lib/utils.js

@@ -1,110 +1,110 @@
-const _          = require('lodash');
-const exec       = require('node:child_process').exec;
-const execFile   = require('node:child_process').execFile;
-const { Liquid } = require('liquidjs');
-const logger     = require('../logger').global;
-const error      = require('./error');
+import { exec as nodeExec, execFile as nodeExecFile } from "node:child_process";
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import { Liquid } from "liquidjs";
+import _ from "lodash";
+import { global as logger } from "../logger.js";
+import errs from "./error.js";
 
-module.exports = {
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
-	exec: async (cmd, options = {}) => {
-		logger.debug('CMD:', cmd);
-
-		const { stdout, stderr } = await new Promise((resolve, reject) => {
-			const child = exec(cmd, options, (isError, stdout, stderr) => {
-				if (isError) {
-					reject(new error.CommandError(stderr, isError));
-				} else {
-					resolve({ stdout, stderr });
-				}
-			});
+const exec = async (cmd, options = {}) => {
+	logger.debug("CMD:", cmd);
+	const { stdout, stderr } = await new Promise((resolve, reject) => {
+		const child = nodeExec(cmd, options, (isError, stdout, stderr) => {
+			if (isError) {
+				reject(new errs.CommandError(stderr, isError));
+			} else {
+				resolve({ stdout, stderr });
+			}
+		});
 
-			child.on('error', (e) => {
-				reject(new error.CommandError(stderr, 1, e));
-			});
+		child.on("error", (e) => {
+			reject(new errs.CommandError(stderr, 1, e));
 		});
-		return stdout;
-	},
+	});
+	return stdout;
+};
 
-	/**
-	 * @param   {String} cmd
-	 * @param   {Array}  args
-	 * @param   {Object|undefined}  options
-	 * @returns {Promise}
-	 */
-	execFile: (cmd, args, options) => {
-		logger.debug(`CMD: ${cmd} ${args ? args.join(' ') : ''}`);
-		if (typeof options === 'undefined') {
-			options = {};
-		}
+/**
+ * @param   {String} cmd
+ * @param   {Array}  args
+ * @param   {Object|undefined}  options
+ * @returns {Promise}
+ */
+const execFile = (cmd, args, options) => {
+	logger.debug(`CMD: ${cmd} ${args ? args.join(" ") : ""}`);
+	const opts = options || {};
 
-		return new Promise((resolve, reject) => {
-			execFile(cmd, args, options, (err, stdout, stderr) => {
-				if (err && typeof err === 'object') {
-					reject(new error.CommandError(stderr, 1, err));
-				} else {
-					resolve(stdout.trim());
-				}
-			});
+	return new Promise((resolve, reject) => {
+		nodeExecFile(cmd, args, opts, (err, stdout, stderr) => {
+			if (err && typeof err === "object") {
+				reject(new errs.CommandError(stderr, 1, err));
+			} else {
+				resolve(stdout.trim());
+			}
 		});
-	},
+	});
+};
 
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRow = (omissions) => {
 	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
+	 * @param   {Object} row
+	 * @returns {Object}
 	 */
-	omitRow: (omissions) => {
-		/**
-		 * @param   {Object} row
-		 * @returns {Object}
-		 */
-		return (row) => {
-			return _.omit(row, omissions);
-		};
-	},
+	return (row) => {
+		return _.omit(row, omissions);
+	};
+};
 
+/**
+ * Used in objection query builder
+ *
+ * @param   {Array}  omissions
+ * @returns {Function}
+ */
+const omitRows = (omissions) => {
 	/**
-	 * Used in objection query builder
-	 *
-	 * @param   {Array}  omissions
-	 * @returns {Function}
+	 * @param   {Array} rows
+	 * @returns {Object}
 	 */
-	omitRows: (omissions) => {
-		/**
-		 * @param   {Array} rows
-		 * @returns {Object}
-		 */
-		return (rows) => {
-			rows.forEach((row, idx) => {
-				rows[idx] = _.omit(row, omissions);
-			});
-			return rows;
-		};
-	},
+	return (rows) => {
+		rows.forEach((row, idx) => {
+			rows[idx] = _.omit(row, omissions);
+		});
+		return rows;
+	};
+};
+
+/**
+ * @returns {Object} Liquid render engine
+ */
+const getRenderEngine = () => {
+	const renderEngine = new Liquid({
+		root: `${__dirname}/../templates/`,
+	});
 
 	/**
-	 * @returns {Object} Liquid render engine
+	 * nginxAccessRule expects the object given to have 2 properties:
+	 *
+	 * directive  string
+	 * address    string
 	 */
-	getRenderEngine: () => {
-		const renderEngine = new Liquid({
-			root: `${__dirname}/../templates/`
-		});
-
-		/**
-		 * nginxAccessRule expects the object given to have 2 properties:
-		 *
-		 * directive  string
-		 * address    string
-		 */
-		renderEngine.registerFilter('nginxAccessRule', (v) => {
-			if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
-				return `${v.directive} ${v.address};`;
-			}
-			return '';
-		});
+	renderEngine.registerFilter("nginxAccessRule", (v) => {
+		if (typeof v.directive !== "undefined" && typeof v.address !== "undefined" && v.directive && v.address) {
+			return `${v.directive} ${v.address};`;
+		}
+		return "";
+	});
 
-		return renderEngine;
-	}
+	return renderEngine;
 };
+
+export default { exec, execFile, omitRow, omitRows, getRenderEngine };
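
The default export keeps the same object shape, so existing call sites only change their import line. Illustrative usage:

```js
import utils from "./lib/utils.js";

const out = await utils.exec("echo hello"); // resolves with stdout, rejects with errs.CommandError
const engine = utils.getRenderEngine(); // Liquid instance rooted at backend/templates/
```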

+ 16 - 16
backend/lib/validator/api.js

@@ -1,12 +1,12 @@
-const Ajv   = require('ajv/dist/2020');
-const error = require('../error');
+import Ajv from "ajv/dist/2020.js";
+import errs from "../error.js";
 
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
-	strict:          false,
-	coerceTypes:     true,
+	strict: false,
+	coerceTypes: true,
 });
 
 /**
@@ -14,30 +14,30 @@ const ajv = new Ajv({
  * @param {Object} payload
  * @returns {Promise}
  */
-function apiValidator (schema, payload/*, description*/) {
-	return new Promise(function Promise_apiValidator (resolve, reject) {
+function apiValidator(schema, payload /*, description*/) {
+	return new Promise(function Promise_apiValidator(resolve, reject) {
 		if (schema === null) {
-			reject(new error.ValidationError('Schema is undefined'));
+			reject(new errs.ValidationError("Schema is undefined"));
 			return;
 		}
 
-		if (typeof payload === 'undefined') {
-			reject(new error.ValidationError('Payload is undefined'));
+		if (typeof payload === "undefined") {
+			reject(new errs.ValidationError("Payload is undefined"));
 			return;
 		}
 
 		const validate = ajv.compile(schema);
-		const valid    = validate(payload);
+		const valid = validate(payload);
 
 		if (valid && !validate.errors) {
 			resolve(payload);
 		} else {
-			let message = ajv.errorsText(validate.errors);
-			let err     = new error.ValidationError(message);
-			err.debug   = [validate.errors, payload];
+			const message = ajv.errorsText(validate.errors);
+			const err = new errs.ValidationError(message);
+			err.debug = [validate.errors, payload];
 			reject(err);
 		}
 	});
 }
 
-module.exports = apiValidator;
+export default apiValidator;
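
A hedged usage sketch of the converted apiValidator; the schema and payload below are made up for illustration:

import apiValidator from "./lib/validator/api.js";

const schema = { type: "object", required: ["name"], properties: { name: { type: "string" } } };

try {
	// resolves with the (type-coerced) payload
	const data = await apiValidator(schema, { name: "example" });
} catch (err) {
	// err is a ValidationError; err.debug holds [validate.errors, payload]
}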

+ 18 - 18
backend/lib/validator/index.js

@@ -1,17 +1,17 @@
-const _                 = require('lodash');
-const Ajv               = require('ajv/dist/2020');
-const error             = require('../error');
-const commonDefinitions = require('../../schema/common.json');
+import Ajv from "ajv/dist/2020.js";
+import _ from "lodash";
+import commonDefinitions from "../../schema/common.json" with { type: "json" };
+import errs from "../error.js";
 
 RegExp.prototype.toJSON = RegExp.prototype.toString;
 
 const ajv = new Ajv({
-	verbose:         true,
-	allErrors:       true,
+	verbose: true,
+	allErrors: true,
 	allowUnionTypes: true,
-	coerceTypes:     true,
-	strict:          false,
-	schemas:         [commonDefinitions]
+	coerceTypes: true,
+	strict: false,
+	schemas: [commonDefinitions],
 });
 
 /**
@@ -20,26 +20,26 @@ const ajv = new Ajv({
  * @param   {Object} payload
  * @returns {Promise}
  */
-function validator (schema, payload) {
-	return new Promise(function (resolve, reject) {
+const validator = (schema, payload) => {
+	return new Promise((resolve, reject) => {
 		if (!payload) {
-			reject(new error.InternalValidationError('Payload is falsy'));
+			reject(new errs.InternalValidationError("Payload is falsy"));
 		} else {
 			try {
-				let validate = ajv.compile(schema);
-				let valid    = validate(payload);
+				const validate = ajv.compile(schema);
+				const valid = validate(payload);
 
 				if (valid && !validate.errors) {
 					resolve(_.cloneDeep(payload));
 				} else {
-					let message = ajv.errorsText(validate.errors);
-					reject(new error.InternalValidationError(message));
+					const message = ajv.errorsText(validate.errors);
+					reject(new errs.InternalValidationError(message));
 				}
 			} catch (err) {
 				reject(err);
 			}
 		}
 	});
-}
+};
 
-module.exports = validator;
+export default validator;
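
Note that the `with { type: "json" }` import attribute used for common.json requires a Node release with import-attribute support. A minimal sketch of the internal validator (the schema and payload are illustrative; because common.json is registered on the ajv instance, real callers can also reference its definitions):

import internalValidator from "./lib/validator/index.js";

// resolves with a deep-cloned copy of the payload, or rejects with InternalValidationError
const clean = await internalValidator(
	{ type: "object", required: ["domain_names"] },
	{ domain_names: ["example.com"] },
);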

+ 16 - 12
backend/logger.js

@@ -1,14 +1,18 @@
-const {Signale} = require('signale');
+import signale from "signale";
 
-module.exports = {
-	global:    new Signale({scope: 'Global   '}),
-	migrate:   new Signale({scope: 'Migrate  '}),
-	express:   new Signale({scope: 'Express  '}),
-	access:    new Signale({scope: 'Access   '}),
-	nginx:     new Signale({scope: 'Nginx    '}),
-	ssl:       new Signale({scope: 'SSL      '}),
-	certbot:   new Signale({scope: 'Certbot  '}),
-	import:    new Signale({scope: 'Importer '}),
-	setup:     new Signale({scope: 'Setup    '}),
-	ip_ranges: new Signale({scope: 'IP Ranges'})
+const opts = {
+	logLevel: "info",
 };
+
+const global = new signale.Signale({ scope: "Global   ", ...opts });
+const migrate = new signale.Signale({ scope: "Migrate  ", ...opts });
+const express = new signale.Signale({ scope: "Express  ", ...opts });
+const access = new signale.Signale({ scope: "Access   ", ...opts });
+const nginx = new signale.Signale({ scope: "Nginx    ", ...opts });
+const ssl = new signale.Signale({ scope: "SSL      ", ...opts });
+const certbot = new signale.Signale({ scope: "Certbot  ", ...opts });
+const importer = new signale.Signale({ scope: "Importer ", ...opts });
+const setup = new signale.Signale({ scope: "Setup    ", ...opts });
+const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
+
+export { global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };
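
The logger moves from a single exported object to named exports, so call sites now import only the scoped instance they need. Note the renames: import becomes importer and ip_ranges becomes ipRanges, presumably because `import` is a reserved word. A minimal sketch:

import { global as logger, certbot } from "./logger.js";

logger.info("Backend starting");
certbot.warn("Certbot plugin list is empty");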

+ 11 - 13
backend/migrate.js

@@ -1,15 +1,13 @@
-const db     = require('./db');
-const logger = require('./logger').migrate;
+import db from "./db.js";
+import { migrate as logger } from "./logger.js";
 
-module.exports = {
-	latest: function () {
-		return db.migrate.currentVersion()
-			.then((version) => {
-				logger.info('Current database version:', version);
-				return db.migrate.latest({
-					tableName: 'migrations',
-					directory: 'migrations'
-				});
-			});
-	}
+const migrateUp = async () => {
+	const version = await db.migrate.currentVersion();
+	logger.info("Current database version:", version);
+	return await db.migrate.latest({
+		tableName: "migrations",
+		directory: "migrations",
+	});
 };
+
+export { migrateUp };
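
The migrate module now exposes a single async migrateUp helper rather than an exported object. A usage sketch, assuming it is awaited during startup (e.g. from index.js):

import { migrateUp } from "./migrate.js";

// logs the current DB version, then runs any pending knex migrations
const results = await migrateUp();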

+ 134 - 133
backend/migrations/20180618015850_initial.js

@@ -1,5 +1,6 @@
-const migrate_name = 'initial-schema';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "initial-schema";
 
 /**
  * Migrate
@@ -7,199 +8,199 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.createTable('auth', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('user_id').notNull().unsigned();
-		table.string('type', 30).notNull();
-		table.string('secret').notNull();
-		table.json('meta').notNull();
-		table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.createTable("auth", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("user_id").notNull().unsigned();
+			table.string("type", 30).notNull();
+			table.string("secret").notNull();
+			table.json("meta").notNull();
+			table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] auth Table created');
+			logger.info(`[${migrateName}] auth Table created`);
 
-			return knex.schema.createTable('user', (table) => {
+			return knex.schema.createTable("user", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('is_disabled').notNull().unsigned().defaultTo(0);
-				table.string('email').notNull();
-				table.string('name').notNull();
-				table.string('nickname').notNull();
-				table.string('avatar').notNull();
-				table.json('roles').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("is_disabled").notNull().unsigned().defaultTo(0);
+				table.string("email").notNull();
+				table.string("name").notNull();
+				table.string("nickname").notNull();
+				table.string("avatar").notNull();
+				table.json("roles").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] user Table created');
+			logger.info(`[${migrateName}] user Table created`);
 
-			return knex.schema.createTable('user_permission', (table) => {
+			return knex.schema.createTable("user_permission", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('visibility').notNull();
-				table.string('proxy_hosts').notNull();
-				table.string('redirection_hosts').notNull();
-				table.string('dead_hosts').notNull();
-				table.string('streams').notNull();
-				table.string('access_lists').notNull();
-				table.string('certificates').notNull();
-				table.unique('user_id');
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("visibility").notNull();
+				table.string("proxy_hosts").notNull();
+				table.string("redirection_hosts").notNull();
+				table.string("dead_hosts").notNull();
+				table.string("streams").notNull();
+				table.string("access_lists").notNull();
+				table.string("certificates").notNull();
+				table.unique("user_id");
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] user_permission Table created');
+			logger.info(`[${migrateName}] user_permission Table created`);
 
-			return knex.schema.createTable('proxy_host', (table) => {
+			return knex.schema.createTable("proxy_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_ip').notNull();
-				table.integer('forward_port').notNull().unsigned();
-				table.integer('access_list_id').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_ip").notNull();
+				table.integer("forward_port").notNull().unsigned();
+				table.integer("access_list_id").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("caching_enabled").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table created');
+			logger.info(`[${migrateName}] proxy_host Table created`);
 
-			return knex.schema.createTable('redirection_host', (table) => {
+			return knex.schema.createTable("redirection_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.string('forward_domain_name').notNull();
-				table.integer('preserve_path').notNull().unsigned().defaultTo(0);
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.integer('block_exploits').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.string("forward_domain_name").notNull();
+				table.integer("preserve_path").notNull().unsigned().defaultTo(0);
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.integer("block_exploits").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table created');
+			logger.info(`[${migrateName}] redirection_host Table created`);
 
-			return knex.schema.createTable('dead_host', (table) => {
+			return knex.schema.createTable("dead_host", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.json('domain_names').notNull();
-				table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-				table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
-				table.text('advanced_config').notNull().defaultTo('');
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.json("domain_names").notNull();
+				table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+				table.integer("ssl_forced").notNull().unsigned().defaultTo(0);
+				table.text("advanced_config").notNull().defaultTo("");
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table created');
+			logger.info(`[${migrateName}] dead_host Table created`);
 
-			return knex.schema.createTable('stream', (table) => {
+			return knex.schema.createTable("stream", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.integer('incoming_port').notNull().unsigned();
-				table.string('forward_ip').notNull();
-				table.integer('forwarding_port').notNull().unsigned();
-				table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
-				table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.integer("incoming_port").notNull().unsigned();
+				table.string("forward_ip").notNull();
+				table.integer("forwarding_port").notNull().unsigned();
+				table.integer("tcp_forwarding").notNull().unsigned().defaultTo(0);
+				table.integer("udp_forwarding").notNull().unsigned().defaultTo(0);
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table created');
+			logger.info(`[${migrateName}] stream Table created`);
 
-			return knex.schema.createTable('access_list', (table) => {
+			return knex.schema.createTable("access_list", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('name').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("name").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table created');
+			logger.info(`[${migrateName}] access_list Table created`);
 
-			return knex.schema.createTable('certificate', (table) => {
+			return knex.schema.createTable("certificate", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('owner_user_id').notNull().unsigned();
-				table.integer('is_deleted').notNull().unsigned().defaultTo(0);
-				table.string('provider').notNull();
-				table.string('nice_name').notNull().defaultTo('');
-				table.json('domain_names').notNull();
-				table.dateTime('expires_on').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("owner_user_id").notNull().unsigned();
+				table.integer("is_deleted").notNull().unsigned().defaultTo(0);
+				table.string("provider").notNull();
+				table.string("nice_name").notNull().defaultTo("");
+				table.json("domain_names").notNull();
+				table.dateTime("expires_on").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] certificate Table created');
+			logger.info(`[${migrateName}] certificate Table created`);
 
-			return knex.schema.createTable('access_list_auth', (table) => {
+			return knex.schema.createTable("access_list_auth", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('access_list_id').notNull().unsigned();
-				table.string('username').notNull();
-				table.string('password').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("access_list_id").notNull().unsigned();
+				table.string("username").notNull();
+				table.string("password").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_auth Table created');
+			logger.info(`[${migrateName}] access_list_auth Table created`);
 
-			return knex.schema.createTable('audit_log', (table) => {
+			return knex.schema.createTable("audit_log", (table) => {
 				table.increments().primary();
-				table.dateTime('created_on').notNull();
-				table.dateTime('modified_on').notNull();
-				table.integer('user_id').notNull().unsigned();
-				table.string('object_type').notNull().defaultTo('');
-				table.integer('object_id').notNull().unsigned().defaultTo(0);
-				table.string('action').notNull();
-				table.json('meta').notNull();
+				table.dateTime("created_on").notNull();
+				table.dateTime("modified_on").notNull();
+				table.integer("user_id").notNull().unsigned();
+				table.string("object_type").notNull().defaultTo("");
+				table.integer("object_id").notNull().unsigned().defaultTo(0);
+				table.string("action").notNull();
+				table.json("meta").notNull();
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] audit_log Table created');
+			logger.info(`[${migrateName}] audit_log Table created`);
 		});
-
 };
 
 /**
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };
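
The migration files in this commit all follow the same conversion pattern: the scoped logger comes in via a named import, migrate_name becomes migrateName, and up/down are arrow functions exported by name so knex's migrator can resolve them as ESM migrations. The few reversible migrations (e.g. access_list_client, pass_auth) keep a functional down() that drops what up() added. A distilled sketch of the shape, with an illustrative column name:

import { migrate as logger } from "../logger.js";

const migrateName = "example";

const up = (knex) =>
	knex.schema
		.table("proxy_host", (table) => {
			table.string("example_column").notNull().defaultTo("");
		})
		.then(() => {
			logger.info(`[${migrateName}] proxy_host Table altered`);
		});

const down = (_knex) => {
	logger.warn(`[${migrateName}] You can't migrate down this one.`);
	return Promise.resolve(true);
};

export { up, down };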

+ 15 - 14
backend/migrations/20180929054513_websockets.js

@@ -1,5 +1,6 @@
-const migrate_name = 'websockets';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "websockets";
 
 /**
  * Migrate
@@ -7,29 +8,29 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("allow_websocket_upgrade").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
-
 };
 
 /**
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
-};
+};
+
+export { up, down };

+ 15 - 13
backend/migrations/20181019052346_forward_host.js

@@ -1,5 +1,6 @@
-const migrate_name = 'forward_host';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_host";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.renameColumn('forward_ip', 'forward_host');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.renameColumn("forward_ip", "forward_host");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
-};
+};
+
+export { up, down };

+ 19 - 18
backend/migrations/20181113041458_http2_support.js

@@ -1,5 +1,6 @@
-const migrate_name = 'http2_support';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "http2_support";
 
 /**
  * Migrate
@@ -7,31 +8,31 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("http2_support").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("http2_support").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 };
 
@@ -39,11 +40,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
 
+export { up, down };

+ 14 - 12
backend/migrations/20181213013211_forward_scheme.js

@@ -1,5 +1,6 @@
-const migrate_name = 'forward_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "forward_scheme";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.string('forward_scheme').notNull().defaultTo('http');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.string("forward_scheme").notNull().defaultTo("http");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 23 - 21
backend/migrations/20190104035154_disabled.js

@@ -1,5 +1,6 @@
-const migrate_name = 'disabled';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "disabled";
 
 /**
  * Migrate
@@ -7,38 +8,38 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("enabled").notNull().unsigned().defaultTo(1);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 
-			return knex.schema.table('stream', function (stream) {
-				stream.integer('enabled').notNull().unsigned().defaultTo(1);
+			return knex.schema.table("stream", (stream) => {
+				stream.integer("enabled").notNull().unsigned().defaultTo(1);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] stream Table altered');
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
@@ -46,10 +47,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 14 - 12
backend/migrations/20190215115310_customlocations.js

@@ -1,5 +1,6 @@
-const migrate_name = 'custom_locations';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "custom_locations";
 
 /**
  * Migrate
@@ -8,17 +9,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.json('locations');
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.json("locations");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 		});
 };
 
@@ -26,10 +27,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 23 - 21
backend/migrations/20190218060101_hsts.js

@@ -1,5 +1,6 @@
-const migrate_name = 'hsts';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "hsts";
 
 /**
  * Migrate
@@ -7,34 +8,34 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('proxy_host', function (proxy_host) {
-		proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-		proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
-	})
+	return knex.schema
+		.table("proxy_host", (proxy_host) => {
+			proxy_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+			proxy_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] proxy_host Table altered');
+			logger.info(`[${migrateName}] proxy_host Table altered`);
 
-			return knex.schema.table('redirection_host', function (redirection_host) {
-				redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("redirection_host", (redirection_host) => {
+				redirection_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				redirection_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 
-			return knex.schema.table('dead_host', function (dead_host) {
-				dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
-				dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
+			return knex.schema.table("dead_host", (dead_host) => {
+				dead_host.integer("hsts_enabled").notNull().unsigned().defaultTo(0);
+				dead_host.integer("hsts_subdomains").notNull().unsigned().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] dead_host Table altered');
+			logger.info(`[${migrateName}] dead_host Table altered`);
 		});
 };
 
@@ -42,10 +43,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 10 - 9
backend/migrations/20190227065017_settings.js

@@ -1,5 +1,6 @@
-const migrate_name = 'settings';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "settings";
 
 /**
  * Migrate
@@ -7,11 +8,10 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	return knex.schema.createTable('setting', (table) => {
 		table.string('id').notNull().primary();
@@ -21,7 +21,7 @@ exports.up = function (knex/*, Promise*/) {
 		table.json('meta').notNull();
 	})
 		.then(() => {
-			logger.info('[' + migrate_name + '] setting Table created');
+			logger.info(`[${migrateName}] setting Table created`);
 		});
 };
 
@@ -29,10 +29,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down the initial data.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 27 - 28
backend/migrations/20200410143839_access_list_client.js

@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client";
 
 /**
  * Migrate
@@ -7,32 +8,30 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.createTable('access_list_client', (table) => {
-		table.increments().primary();
-		table.dateTime('created_on').notNull();
-		table.dateTime('modified_on').notNull();
-		table.integer('access_list_id').notNull().unsigned();
-		table.string('address').notNull();
-		table.string('directive').notNull();
-		table.json('meta').notNull();
-
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] access_list_client Table created');
+	return knex.schema
+		.createTable("access_list_client", (table) => {
+			table.increments().primary();
+			table.dateTime("created_on").notNull();
+			table.dateTime("modified_on").notNull();
+			table.integer("access_list_id").notNull().unsigned();
+			table.string("address").notNull();
+			table.string("directive").notNull();
+			table.json("meta").notNull();
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] access_list_client Table created`);
 
-			return knex.schema.table('access_list', function (access_list) {
-				access_list.integer('satify_any').notNull().defaultTo(0);
+			return knex.schema.table("access_list", (access_list) => {
+				access_list.integer("satify_any").notNull().defaultTo(0);
 			});
 		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -40,14 +39,14 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.dropTable('access_list_client')
-		.then(() => {
-			logger.info('[' + migrate_name + '] access_list_client Table dropped');
-		});
+	return knex.schema.dropTable("access_list_client").then(() => {
+		logger.info(`[${migrateName}] access_list_client Table dropped`);
+	});
 };
+
+export { up, down };

+ 14 - 12
backend/migrations/20200410143840_access_list_client_fix.js

@@ -1,5 +1,6 @@
-const migrate_name = 'access_list_client_fix';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "access_list_client_fix";
 
 /**
  * Migrate
@@ -7,17 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.renameColumn('satify_any', 'satisfy_any');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.renameColumn("satify_any", "satisfy_any");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -25,10 +26,11 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex, Promise) {
-	logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
+const down = (_knex) => {
+	logger.warn(`[${migrateName}] You can't migrate down this one.`);
 	return Promise.resolve(true);
 };
+
+export { up, down };

+ 19 - 17
backend/migrations/20201014143841_pass_auth.js

@@ -1,5 +1,6 @@
-const migrate_name = 'pass_auth';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "pass_auth";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object}  knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.integer('pass_auth').notNull().defaultTo(1);
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.integer("pass_auth").notNull().defaultTo(1);
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list Table altered');
+			logger.info(`[${migrateName}] access_list Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param {Object} knex
- * @param {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('access_list', function (access_list) {
-		access_list.dropColumn('pass_auth');
-	})
+	return knex.schema
+		.table("access_list", (access_list) => {
+			access_list.dropColumn("pass_auth");
+		})
 		.then(() => {
-			logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
+			logger.info(`[${migrateName}] access_list pass_auth Column dropped`);
 		});
 };
+
+export { up, down };

+ 21 - 19
backend/migrations/20210210154702_redirection_scheme.js

@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_scheme';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_scheme";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.string('forward_scheme').notNull().defaultTo('$scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.string("forward_scheme").notNull().defaultTo("$scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_scheme');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_scheme");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
+
+export { up, down };

+ 21 - 19
backend/migrations/20210210154703_redirection_status_code.js

@@ -1,5 +1,6 @@
-const migrate_name = 'redirection_status_code';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "redirection_status_code";
 
 /**
  * Migrate
@@ -7,18 +8,17 @@ const logger       = require('../logger').migrate;
  * @see http://knexjs.org/#Schema
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.up = function (knex/*, Promise*/) {
-
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.integer("forward_http_code").notNull().unsigned().defaultTo(302);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
 
@@ -26,16 +26,18 @@ exports.up = function (knex/*, Promise*/) {
  * Undo Migrate
  *
  * @param   {Object} knex
- * @param   {Promise} Promise
  * @returns {Promise}
  */
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('redirection_host', (table) => {
-		table.dropColumn('forward_http_code');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] redirection_host Table altered');
+	return knex.schema
+		.table("redirection_host", (table) => {
+			table.dropColumn("forward_http_code");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] redirection_host Table altered`);
 		});
 };
+
+export { up, down };

+ 33 - 30
backend/migrations/20210423103500_stream_domain.js

@@ -1,40 +1,43 @@
-const migrate_name = 'stream_domain';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_domain";
 
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forward_ip', 'forwarding_host');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forward_ip", "forwarding_host");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex/*, Promise*/) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.renameColumn('forwarding_host', 'forward_ip');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.renameColumn("forwarding_host", "forward_ip");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
+
+export { up, down };

+ 27 - 25
backend/migrations/20211108145214_regenerate_default_host.js

@@ -1,17 +1,19 @@
-const migrate_name  = 'stream_domain';
-const logger        = require('../logger').migrate;
-const internalNginx = require('../internal/nginx');
+import internalNginx from "../internal/nginx.js";
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_domain";
 
 async function regenerateDefaultHost(knex) {
-	const row = await knex('setting').select('*').where('id', 'default-site').first();
+	const row = await knex("setting").select("*").where("id", "default-site").first();
 
 	if (!row) {
 		return Promise.resolve();
 	}
 
-	return internalNginx.deleteConfig('default')
+	return internalNginx
+		.deleteConfig("default")
 		.then(() => {
-			return internalNginx.generateConfig('default', row);
+			return internalNginx.generateConfig("default", row);
 		})
 		.then(() => {
 			return internalNginx.test();
@@ -22,29 +24,29 @@ async function regenerateDefaultHost(knex) {
 }
 
 /**
-	* Migrate
-	*
-	* @see http://knexjs.org/#Schema
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+ * Migrate
+ *
+ * @see http://knexjs.org/#Schema
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
 	return regenerateDefaultHost(knex);
 };
 
 /**
-	* Undo Migrate
-	*
-	* @param   {Object} knex
-	* @param   {Promise} Promise
-	* @returns {Promise}
-	*/
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+ * Undo Migrate
+ *
+ * @param   {Object} knex
+ * @returns {Promise}
+ */
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
 	return regenerateDefaultHost(knex);
-};
+};
+
+export { up, down };

+ 21 - 16
backend/migrations/20240427161436_stream_ssl.js

@@ -1,5 +1,6 @@
-const migrate_name = 'stream_ssl';
-const logger       = require('../logger').migrate;
+import { migrate as logger } from "../logger.js";
+
+const migrateName = "stream_ssl";
 
 /**
  * Migrate
@@ -9,14 +10,15 @@ const logger       = require('../logger').migrate;
  * @param   {Object} knex
  * @returns {Promise}
  */
-exports.up = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Up...');
+const up = (knex) => {
+	logger.info(`[${migrateName}] Migrating Up...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.integer('certificate_id').notNull().unsigned().defaultTo(0);
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.integer("certificate_id").notNull().unsigned().defaultTo(0);
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
 
@@ -26,13 +28,16 @@ exports.up = function (knex) {
  * @param   {Object} knex
  * @returns {Promise}
  */
-exports.down = function (knex) {
-	logger.info('[' + migrate_name + '] Migrating Down...');
+const down = (knex) => {
+	logger.info(`[${migrateName}] Migrating Down...`);
 
-	return knex.schema.table('stream', (table) => {
-		table.dropColumn('certificate_id');
-	})
-		.then(function () {
-			logger.info('[' + migrate_name + '] stream Table altered');
+	return knex.schema
+		.table("stream", (table) => {
+			table.dropColumn("certificate_id");
+		})
+		.then(() => {
+			logger.info(`[${migrateName}] stream Table altered`);
 		});
 };
+
+export { up, down };

+ 51 - 56
backend/models/access_list.js

@@ -1,103 +1,98 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db               = require('../db');
-const helpers          = require('../lib/helpers');
-const Model            = require('objection').Model;
-const User             = require('./user');
-const AccessListAuth   = require('./access_list_auth');
-const AccessListClient = require('./access_list_client');
-const now              = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessListAuth from "./access_list_auth.js";
+import AccessListClient from "./access_list_client.js";
+import now from "./now_helper.js";
+import ProxyHostModel from "./proxy_host.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'satisfy_any',
-	'pass_auth',
-];
+const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];
 
 class AccessList extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'AccessList';
+	static get name() {
+		return "AccessList";
 	}
 
-	static get tableName () {
-		return 'access_list';
+	static get tableName() {
+		return "access_list";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
-		const ProxyHost = require('./proxy_host');
-
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'access_list.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "access_list.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			items: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListAuth,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_auth.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_auth.access_list_id",
+				},
 			},
 			clients: {
-				relation:   Model.HasManyRelation,
+				relation: Model.HasManyRelation,
 				modelClass: AccessListClient,
-				join:       {
-					from: 'access_list.id',
-					to:   'access_list_client.access_list_id'
-				}
+				join: {
+					from: "access_list.id",
+					to: "access_list_client.access_list_id",
+				},
 			},
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'access_list.id',
-					to:   'proxy_host.access_list_id'
+				relation: Model.HasManyRelation,
+				modelClass: ProxyHostModel,
+				join: {
+					from: "access_list.id",
+					to: "proxy_host.access_list_id",
+				},
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
-			}
+			},
 		};
 	}
 }
 
-module.exports = AccessList;
+export default AccessList;
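
One behavioural detail worth noting: the lazy require('./proxy_host') inside relationMappings becomes a top-level import, and the models now reference each other directly (access_list.js imports access_list_auth.js and vice-versa), relying on ESM's handling of circular imports. A hedged query sketch using the relations defined above (the filter and graph are illustrative):

import AccessList from "./models/access_list.js";

const lists = await AccessList.query()
	.where("is_deleted", 0)
	.withGraphFetched("[owner, items, clients, proxy_hosts]");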

+ 25 - 24
backend/models/access_list_auth.js

@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
 class AccessListAuth extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AccessListAuth';
+	static get name() {
+		return "AccessListAuth";
 	}
 
-	static get tableName () {
-		return 'access_list_auth';
+	static get tableName() {
+		return "access_list_auth";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_auth.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_auth.access_list_id",
+					to: "access_list.id",
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = AccessListAuth;
+export default AccessListAuth;

+ 25 - 24
backend/models/access_list_client.js

@@ -1,54 +1,55 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import accessListModel from "./access_list.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
 class AccessListClient extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AccessListClient';
+	static get name() {
+		return "AccessListClient";
 	}
 
-	static get tableName () {
-		return 'access_list_client';
+	static get tableName() {
+		return "access_list_client";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			access_list: {
-				relation:   Model.HasOneRelation,
-				modelClass: require('./access_list'),
-				join:       {
-					from: 'access_list_client.access_list_id',
-					to:   'access_list.id'
+				relation: Model.HasOneRelation,
+				modelClass: accessListModel,
+				join: {
+					from: "access_list_client.access_list_id",
+					to: "access_list.id",
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = AccessListClient;
+export default AccessListClient;

+ 22 - 22
backend/models/audit-log.js

@@ -1,52 +1,52 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const User  = require('./user');
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 class AuditLog extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
-	static get name () {
-		return 'AuditLog';
+	static get name() {
+		return "AuditLog";
 	}
 
-	static get tableName () {
-		return 'audit_log';
+	static get tableName() {
+		return "audit_log";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'audit_log.user_id',
-					to:   'user.id'
-				}
-			}
+				join: {
+					from: "audit_log.user_id",
+					to: "user.id",
+				},
+			},
 		};
 	}
 }
 
-module.exports = AuditLog;
+export default AuditLog;

+ 36 - 42
backend/models/auth.js

@@ -1,59 +1,53 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const bcrypt  = require('bcrypt');
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const User    = require('./user');
-const now     = require('./now_helper');
+import bcrypt from "bcrypt";
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
-function encryptPassword () {
-	/* jshint -W040 */
-	let _this = this;
-
-	if (_this.type === 'password' && _this.secret) {
-		return bcrypt.hash(_this.secret, 13)
-			.then(function (hash) {
-				_this.secret = hash;
-			});
+function encryptPassword() {
+	if (this.type === "password" && this.secret) {
+		return bcrypt.hash(this.secret, 13).then((hash) => {
+			this.secret = hash;
+		});
 	}
 
 	return null;
 }
 
 class Auth extends Model {
-	$beforeInsert (queryContext) {
-		this.created_on  = now();
+	$beforeInsert(queryContext) {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		return encryptPassword.apply(this, queryContext);
 	}
 
-	$beforeUpdate (queryContext) {
+	$beforeUpdate(queryContext) {
 		this.modified_on = now();
 		return encryptPassword.apply(this, queryContext);
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
 	/**
@@ -62,37 +56,37 @@ class Auth extends Model {
 	 * @param {String} password
 	 * @returns {Promise}
 	 */
-	verifyPassword (password) {
+	verifyPassword(password) {
 		return bcrypt.compare(password, this.secret);
 	}
 
-	static get name () {
-		return 'Auth';
+	static get name() {
+		return "Auth";
 	}
 
-	static get tableName () {
-		return 'auth';
+	static get tableName() {
+		return "auth";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			user: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'auth.user_id',
-					to:   'user.id'
+				join: {
+					from: "auth.user_id",
+					to: "user.id",
 				},
 				filter: {
-					is_deleted: 0
-				}
-			}
+					is_deleted: 0,
+				},
+			},
 		};
 	}
 }
 
-module.exports = Auth;
+export default Auth;
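
encryptPassword above deliberately remains a regular function rather than an arrow, so that "this" is still the model instance when it is invoked via .apply() from the insert/update hooks. The hashing itself is plain bcrypt; a standalone sketch of the round trip (sample password assumed):

	import bcrypt from "bcrypt";

	// Hash with a cost factor of 13, matching $beforeInsert above.
	const hash = await bcrypt.hash("changeme", 13);

	// verifyPassword() is a bcrypt.compare() against the stored hash.
	console.log(await bcrypt.compare("changeme", hash)); // true
	console.log(await bcrypt.compare("wrong-password", hash)); // false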

+ 61 - 64
backend/models/certificate.js

@@ -1,124 +1,121 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db      = require('../db');
-const helpers = require('../lib/helpers');
-const Model   = require('objection').Model;
-const now     = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import deadHostModel from "./dead_host.js";
+import now from "./now_helper.js";
+import proxyHostModel from "./proxy_host.js";
+import redirectionHostModel from "./redirection_host.js";
+import userModel from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-];
+const boolFields = ["is_deleted"];
 
 class Certificate extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for expires_on
-		if (typeof this.expires_on === 'undefined') {
+		if (typeof this.expires_on === "undefined") {
 			this.expires_on = now();
 		}
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'Certificate';
+	static get name() {
+		return "Certificate";
 	}
 
-	static get tableName () {
-		return 'certificate';
+	static get tableName() {
+		return "certificate";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
-		const ProxyHost       = require('./proxy_host');
-		const DeadHost        = require('./dead_host');
-		const User            = require('./user');
-		const RedirectionHost = require('./redirection_host');
-
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
-				modelClass: User,
-				join:       {
-					from: 'certificate.owner_user_id',
-					to:   'user.id'
+				relation: Model.HasOneRelation,
+				modelClass: userModel,
+				join: {
+					from: "certificate.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			proxy_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: ProxyHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'proxy_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: proxyHostModel,
+				join: {
+					from: "certificate.id",
+					to: "proxy_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("proxy_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('proxy_host.is_deleted', 0);
-				}
 			},
 			dead_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: DeadHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'dead_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: deadHostModel,
+				join: {
+					from: "certificate.id",
+					to: "dead_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("dead_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('dead_host.is_deleted', 0);
-				}
 			},
 			redirection_hosts: {
-				relation:   Model.HasManyRelation,
-				modelClass: RedirectionHost,
-				join:       {
-					from: 'certificate.id',
-					to:   'redirection_host.certificate_id'
+				relation: Model.HasManyRelation,
+				modelClass: redirectionHostModel,
+				join: {
+					from: "certificate.id",
+					to: "redirection_host.certificate_id",
+				},
+				modify: (qb) => {
+					qb.where("redirection_host.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('redirection_host.is_deleted', 0);
-				}
-			}
+			},
 		};
 	}
 }
 
-module.exports = Certificate;
+export default Certificate;
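
The $parseDatabaseJson/$formatDatabaseJson pair converts the 0/1 integers that MySQL and SQLite store into real booleans on the way out of the database, and back again on the way in. A standalone sketch of what the imported helpers are expected to do (illustrative only, not the code in lib/helpers.js):

	const convertIntFieldsToBool = (row, fields) =>
		Object.fromEntries(Object.entries(row).map(([key, val]) => [key, fields.includes(key) ? Boolean(val) : val]));

	const convertBoolFieldsToInt = (row, fields) =>
		Object.fromEntries(Object.entries(row).map(([key, val]) => [key, fields.includes(key) ? (val ? 1 : 0) : val]));

	console.log(convertIntFieldsToBool({ id: 1, is_deleted: 0 }, ["is_deleted"])); // { id: 1, is_deleted: false }
	console.log(convertBoolFieldsToInt({ id: 1, is_deleted: false }, ["is_deleted"])); // { id: 1, is_deleted: 0 }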

+ 40 - 47
backend/models/dead_host.js

@@ -1,99 +1,92 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
-];
+const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];
 
 class DeadHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'DeadHost';
+	static get name() {
+		return "DeadHost";
 	}
 
-	static get tableName () {
-		return 'dead_host';
+	static get tableName() {
+		return "dead_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'dead_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "dead_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'dead_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "dead_host.certificate_id",
+					to: "certificate.id",
+				},
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+			},
 		};
 	}
 }
 
-module.exports = DeadHost;
+export default DeadHost;

+ 6 - 7
backend/models/now_helper.js

@@ -1,13 +1,12 @@
-const db     = require('../db');
-const config = require('../lib/config');
-const Model  = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
+import { isSqlite } from "../lib/config.js";
 
 Model.knex(db);
 
-module.exports = function () {
-	if (config.isSqlite()) {
-		// eslint-disable-next-line
+export default () => {
+	if (isSqlite()) {
 		return Model.raw("datetime('now','localtime')");
 	}
-	return Model.raw('NOW()');
+	return Model.raw("NOW()");
 };
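
now() returns a raw SQL expression rather than a JavaScript Date, so the timestamp is computed by the database itself at query time. A rough usage sketch against the project's knex instance (table and row id chosen for illustration; the raw expression is inlined into the generated SQL):

	import db from "../db.js";
	import now from "./now_helper.js";

	// SQLite renders datetime('now','localtime'); MySQL renders NOW().
	await db("user").where("id", 1).update({ modified_on: now() });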

+ 56 - 56
backend/models/proxy_host.js

@@ -1,114 +1,114 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const AccessList  = require('./access_list');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import AccessList from "./access_list.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 const boolFields = [
-	'is_deleted',
-	'ssl_forced',
-	'caching_enabled',
-	'block_exploits',
-	'allow_websocket_upgrade',
-	'http2_support',
-	'enabled',
-	'hsts_enabled',
-	'hsts_subdomains',
+	"is_deleted",
+	"ssl_forced",
+	"caching_enabled",
+	"block_exploits",
+	"allow_websocket_upgrade",
+	"http2_support",
+	"enabled",
+	"hsts_enabled",
+	"hsts_subdomains",
 ];
 
 class ProxyHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'ProxyHost';
+	static get name() {
+		return "ProxyHost";
 	}
 
-	static get tableName () {
-		return 'proxy_host';
+	static get tableName() {
+		return "proxy_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta', 'locations'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta", "locations"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'proxy_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "proxy_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			access_list: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: AccessList,
-				join:       {
-					from: 'proxy_host.access_list_id',
-					to:   'access_list.id'
+				join: {
+					from: "proxy_host.access_list_id",
+					to: "access_list.id",
+				},
+				modify: (qb) => {
+					qb.where("access_list.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('access_list.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'proxy_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "proxy_host.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = ProxyHost;
+export default ProxyHost;
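
For context on what these mappings enable once everything is an ES module, a hedged query sketch using Objection's graph fetching (id and printed fields assumed):

	import ProxyHost from "./proxy_host.js";

	const host = await ProxyHost.query()
		.findById(1)
		.withGraphFetched("[owner, access_list.[items, clients], certificate]");

	console.log(host.domain_names, host.access_list?.clients?.length);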

+ 47 - 48
backend/models/redirection_host.js

@@ -1,102 +1,101 @@
-
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const Model       = require('objection').Model;
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
 const boolFields = [
-	'is_deleted',
-	'enabled',
-	'preserve_path',
-	'ssl_forced',
-	'block_exploits',
-	'hsts_enabled',
-	'hsts_subdomains',
-	'http2_support',
+	"is_deleted",
+	"enabled",
+	"preserve_path",
+	"ssl_forced",
+	"block_exploits",
+	"hsts_enabled",
+	"hsts_subdomains",
+	"http2_support",
 ];
 
 class RedirectionHost extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for domain_names
-		if (typeof this.domain_names === 'undefined') {
+		if (typeof this.domain_names === "undefined") {
 			this.domain_names = [];
 		}
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 
 		this.domain_names.sort();
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 
 		// Sort domain_names
-		if (typeof this.domain_names !== 'undefined') {
+		if (typeof this.domain_names !== "undefined") {
 			this.domain_names.sort();
 		}
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'RedirectionHost';
+	static get name() {
+		return "RedirectionHost";
 	}
 
-	static get tableName () {
-		return 'redirection_host';
+	static get tableName() {
+		return "redirection_host";
 	}
 
-	static get jsonAttributes () {
-		return ['domain_names', 'meta'];
+	static get jsonAttributes() {
+		return ["domain_names", "meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'redirection_host.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "redirection_host.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'redirection_host.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "redirection_host.certificate_id",
+					to: "certificate.id",
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
+				},
+			},
 		};
 	}
 }
 
-module.exports = RedirectionHost;
+export default RedirectionHost;

+ 3 - 3
backend/models/setting.js

@@ -1,8 +1,8 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
+import { Model } from "objection";
+import db from "../db.js";
 
 Model.knex(db);
 
@@ -27,4 +27,4 @@ class Setting extends Model {
 	}
 }
 
-module.exports = Setting;
+export default Setting;

+ 38 - 43
backend/models/stream.js

@@ -1,82 +1,77 @@
-const Model       = require('objection').Model;
-const db          = require('../db');
-const helpers     = require('../lib/helpers');
-const User        = require('./user');
-const Certificate = require('./certificate');
-const now         = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import Certificate from "./certificate.js";
+import now from "./now_helper.js";
+import User from "./user.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'enabled',
-	'tcp_forwarding',
-	'udp_forwarding',
-];
+const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];
 
 class Stream extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for meta
-		if (typeof this.meta === 'undefined') {
+		if (typeof this.meta === "undefined") {
 			this.meta = {};
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'Stream';
+	static get name() {
+		return "Stream";
 	}
 
-	static get tableName () {
-		return 'stream';
+	static get tableName() {
+		return "stream";
 	}
 
-	static get jsonAttributes () {
-		return ['meta'];
+	static get jsonAttributes() {
+		return ["meta"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			owner: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: User,
-				join:       {
-					from: 'stream.owner_user_id',
-					to:   'user.id'
+				join: {
+					from: "stream.owner_user_id",
+					to: "user.id",
+				},
+				modify: (qb) => {
+					qb.where("user.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('user.is_deleted', 0);
-				}
 			},
 			certificate: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: Certificate,
-				join:       {
-					from: 'stream.certificate_id',
-					to:   'certificate.id'
+				join: {
+					from: "stream.certificate_id",
+					to: "certificate.id",
+				},
+				modify: (qb) => {
+					qb.where("certificate.is_deleted", 0);
 				},
-				modify: function (qb) {
-					qb.where('certificate.is_deleted', 0);
-				}
-			}
+			},
 		};
 	}
 }
 
-module.exports = Stream;
+export default Stream;

+ 52 - 51
backend/models/token.js

@@ -3,16 +3,16 @@
  and then has abilities after that.
  */
 
-const _      = require('lodash');
-const jwt    = require('jsonwebtoken');
-const crypto = require('crypto');
-const config = require('../lib/config');
-const error  = require('../lib/error');
-const logger = require('../logger').global;
-const ALGO   = 'RS256';
+import crypto from "node:crypto";
+import jwt from "jsonwebtoken";
+import _ from "lodash";
+import { getPrivateKey, getPublicKey } from "../lib/config.js";
+import errs from "../lib/error.js";
+import { global as logger } from "../logger.js";
 
-module.exports = function () {
+const ALGO = "RS256";
 
+export default () => {
 	let token_data = {};
 
 	const self = {
@@ -21,28 +21,26 @@ module.exports = function () {
 		 * @returns {Promise}
 		 */
 		create: (payload) => {
-			if (!config.getPrivateKey()) {
-				logger.error('Private key is empty!');
+			if (!getPrivateKey()) {
+				logger.error("Private key is empty!");
 			}
 			// sign with RSA SHA256
 			const options = {
 				algorithm: ALGO,
-				expiresIn: payload.expiresIn || '1d'
+				expiresIn: payload.expiresIn || "1d",
 			};
 
-			payload.jti = crypto.randomBytes(12)
-				.toString('base64')
-				.substring(-8);
+			payload.jti = crypto.randomBytes(12).toString("base64").substring(-8);
 
 			return new Promise((resolve, reject) => {
-				jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
+				jwt.sign(payload, getPrivateKey(), options, (err, token) => {
 					if (err) {
 						reject(err);
 					} else {
 						token_data = payload;
 						resolve({
-							token:   token,
-							payload: payload
+							token: token,
+							payload: payload,
 						});
 					}
 				});
@@ -53,42 +51,47 @@ module.exports = function () {
 		 * @param {String} token
 		 * @returns {Promise}
 		 */
-		load: function (token) {
-			if (!config.getPublicKey()) {
-				logger.error('Public key is empty!');
+		load: (token) => {
+			if (!getPublicKey()) {
+				logger.error("Public key is empty!");
 			}
 			return new Promise((resolve, reject) => {
 				try {
-					if (!token || token === null || token === 'null') {
-						reject(new error.AuthError('Empty token'));
+					if (!token || token === null || token === "null") {
+						reject(new errs.AuthError("Empty token"));
 					} else {
-						jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
-							if (err) {
-
-								if (err.name === 'TokenExpiredError') {
-									reject(new error.AuthError('Token has expired', err));
+						jwt.verify(
+							token,
+							getPublicKey(),
+							{ ignoreExpiration: false, algorithms: [ALGO] },
+							(err, result) => {
+								if (err) {
+									if (err.name === "TokenExpiredError") {
+										reject(new errs.AuthError("Token has expired", err));
+									} else {
+										reject(err);
+									}
 								} else {
-									reject(err);
+									token_data = result;
+
+									// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
+									// For 30 days at least, we need to replace 'all' with user.
+									if (
+										typeof token_data.scope !== "undefined" &&
+										_.indexOf(token_data.scope, "all") !== -1
+									) {
+										token_data.scope = ["user"];
+									}
+
+									resolve(token_data);
 								}
-
-							} else {
-								token_data = result;
-
-								// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
-								// For 30 days at least, we need to replace 'all' with user.
-								if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
-									token_data.scope = ['user'];
-								}
-
-								resolve(token_data);
-							}
-						});
+							},
+						);
 					}
 				} catch (err) {
 					reject(err);
 				}
 			});
-
 		},
 
 		/**
@@ -97,16 +100,14 @@ module.exports = function () {
 		 * @param   {String}  scope
 		 * @returns {Boolean}
 		 */
-		hasScope: function (scope) {
-			return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
-		},
+		hasScope: (scope) => typeof token_data.scope !== "undefined" && _.indexOf(token_data.scope, scope) !== -1,
 
 		/**
 		 * @param  {String}  key
 		 * @return {*}
 		 */
-		get: function (key) {
-			if (typeof token_data[key] !== 'undefined') {
+		get: (key) => {
+			if (typeof token_data[key] !== "undefined") {
 				return token_data[key];
 			}
 
@@ -117,7 +118,7 @@ module.exports = function () {
 		 * @param  {String}  key
 		 * @param  {*}       value
 		 */
-		set: function (key, value) {
+		set: (key, value) => {
 			token_data[key] = value;
 		},
 
@@ -126,13 +127,13 @@ module.exports = function () {
 		 * @returns {Integer}
 		 */
 		getUserId: (default_value) => {
-			const attrs = self.get('attrs');
-			if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
+			const attrs = self.get("attrs");
+			if (attrs && typeof attrs.id !== "undefined" && attrs.id) {
 				return attrs.id;
 			}
 
 			return default_value || 0;
-		}
+		},
 	};
 
 	return self;
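
The factory above is a thin wrapper around RS256 signing and verification with the key pair from lib/config.js. A self-contained sketch of that round trip using a throwaway key pair (generated here for illustration, not the project's keys):

	import crypto from "node:crypto";
	import jwt from "jsonwebtoken";

	const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", {
		modulusLength: 2048,
		publicKeyEncoding: { type: "spki", format: "pem" },
		privateKeyEncoding: { type: "pkcs8", format: "pem" },
	});

	const token = jwt.sign({ scope: ["user"], attrs: { id: 1 } }, privateKey, {
		algorithm: "RS256",
		expiresIn: "1d",
	});

	jwt.verify(token, publicKey, { ignoreExpiration: false, algorithms: ["RS256"] }, (err, payload) => {
		if (err) throw err;
		console.log(payload.scope); // [ 'user' ]
	});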

+ 28 - 32
backend/models/user.js

@@ -1,69 +1,65 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db             = require('../db');
-const helpers        = require('../lib/helpers');
-const Model          = require('objection').Model;
-const UserPermission = require('./user_permission');
-const now            = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.js";
+import now from "./now_helper.js";
+import UserPermission from "./user_permission.js";
 
 Model.knex(db);
 
-const boolFields = [
-	'is_deleted',
-	'is_disabled',
-];
+const boolFields = ["is_deleted", "is_disabled"];
 
 class User extends Model {
-	$beforeInsert () {
-		this.created_on  = now();
+	$beforeInsert() {
+		this.created_on = now();
 		this.modified_on = now();
 
 		// Default for roles
-		if (typeof this.roles === 'undefined') {
+		if (typeof this.roles === "undefined") {
 			this.roles = [];
 		}
 	}
 
-	$beforeUpdate () {
+	$beforeUpdate() {
 		this.modified_on = now();
 	}
 
 	$parseDatabaseJson(json) {
-		json = super.$parseDatabaseJson(json);
-		return helpers.convertIntFieldsToBool(json, boolFields);
+		const thisJson = super.$parseDatabaseJson(json);
+		return convertIntFieldsToBool(thisJson, boolFields);
 	}
 
 	$formatDatabaseJson(json) {
-		json = helpers.convertBoolFieldsToInt(json, boolFields);
-		return super.$formatDatabaseJson(json);
+		const thisJson = convertBoolFieldsToInt(json, boolFields);
+		return super.$formatDatabaseJson(thisJson);
 	}
 
-	static get name () {
-		return 'User';
+	static get name() {
+		return "User";
 	}
 
-	static get tableName () {
-		return 'user';
+	static get tableName() {
+		return "user";
 	}
 
-	static get jsonAttributes () {
-		return ['roles'];
+	static get jsonAttributes() {
+		return ["roles"];
 	}
 
-	static get relationMappings () {
+	static get relationMappings() {
 		return {
 			permissions: {
-				relation:   Model.HasOneRelation,
+				relation: Model.HasOneRelation,
 				modelClass: UserPermission,
-				join:       {
-					from: 'user.id',
-					to:   'user_permission.user_id'
-				}
-			}
+				join: {
+					from: "user.id",
+					to: "user_permission.user_id",
+				},
+			},
 		};
 	}
-
 }
 
-module.exports = User;
+export default User;

+ 4 - 4
backend/models/user_permission.js

@@ -1,9 +1,9 @@
 // Objection Docs:
 // http://vincit.github.io/objection.js/
 
-const db    = require('../db');
-const Model = require('objection').Model;
-const now   = require('./now_helper');
+import { Model } from "objection";
+import db from "../db.js";
+import now from "./now_helper.js";
 
 Model.knex(db);
 
@@ -26,4 +26,4 @@ class UserPermission extends Model {
 	}
 }
 
-module.exports = UserPermission;
+export default UserPermission;

+ 14 - 13
backend/package.json

@@ -1,8 +1,16 @@
 {
 	"name": "nginx-proxy-manager",
-	"version": "0.0.0",
+	"version": "2.0.0",
 	"description": "A beautiful interface for creating Nginx endpoints",
+	"author": "Jamie Curnow <[email protected]>",
+	"license": "MIT",
 	"main": "index.js",
+	"type": "module",
+	"scripts": {
+		"lint": "biome lint",
+		"prettier": "biome format --write .",
+		"validate-schema": "node validate-schema.js"
+	},
 	"dependencies": {
 		"@apidevtools/json-schema-ref-parser": "^11.7.0",
 		"ajv": "^8.17.1",
@@ -28,21 +36,14 @@
 		"sqlite3": "5.1.6",
 		"temp-write": "^4.0.0"
 	},
-	"signale": {
-		"displayDate": true,
-		"displayTimestamp": true
-	},
-	"author": "Jamie Curnow <[email protected]>",
-	"license": "MIT",
 	"devDependencies": {
 		"@apidevtools/swagger-parser": "^10.1.0",
+		"@biomejs/biome": "2.2.0",
 		"chalk": "4.1.2",
-		"eslint": "^8.36.0",
-		"eslint-plugin-align-assignments": "^1.1.2",
-		"nodemon": "^2.0.2",
-		"prettier": "^2.0.4"
+		"nodemon": "^2.0.2"
 	},
-	"scripts": {
-		"validate-schema": "node validate-schema.js"
+	"signale": {
+		"displayDate": true,
+		"displayTimestamp": true
 	}
 }
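
Setting "type": "module" is what makes every .js file in the backend an ES module and allows the import/export syntax used throughout this commit. Two CommonJS conveniences disappear with it: __dirname/__filename and require(). A minimal sketch of the usual replacements (file layout assumed):

	import { readFile } from "node:fs/promises";
	import path from "node:path";
	import { fileURLToPath } from "node:url";

	// ESM equivalent of __dirname
	const __dirname = path.dirname(fileURLToPath(import.meta.url));

	// Reading JSON without require()
	const pkg = JSON.parse(await readFile(path.join(__dirname, "package.json"), "utf8"));
	console.log(pkg.version); // "2.0.0"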

+ 26 - 24
backend/routes/audit-log.js

@@ -1,19 +1,19 @@
-const express          = require('express');
-const validator        = require('../lib/validator');
-const jwtdecode        = require('../lib/express/jwt-decode');
-const internalAuditLog = require('../internal/audit-log');
+import express from "express";
+import internalAuditLog from "../internal/audit-log.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import validator from "../lib/validator/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/audit-log
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -25,28 +25,30 @@ router
 	 * Retrieve all logs
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 41 - 29
backend/routes/main.js

@@ -1,51 +1,63 @@
-const express = require('express');
-const pjson   = require('../package.json');
-const error   = require('../lib/error');
+import express from "express";
+import errs from "../lib/error.js";
+import pjson from "../package.json" with { type: "json" };
+import auditLogRoutes from "./audit-log.js";
+import accessListsRoutes from "./nginx/access_lists.js";
+import certificatesHostsRoutes from "./nginx/certificates.js";
+import deadHostsRoutes from "./nginx/dead_hosts.js";
+import proxyHostsRoutes from "./nginx/proxy_hosts.js";
+import redirectionHostsRoutes from "./nginx/redirection_hosts.js";
+import streamsRoutes from "./nginx/streams.js";
+import reportsRoutes from "./reports.js";
+import schemaRoutes from "./schema.js";
+import settingsRoutes from "./settings.js";
+import tokensRoutes from "./tokens.js";
+import usersRoutes from "./users.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * Health Check
  * GET /api
  */
-router.get('/', (req, res/*, next*/) => {
-	let version = pjson.version.split('-').shift().split('.');
+router.get("/", (_, res /*, next*/) => {
+	const version = pjson.version.split("-").shift().split(".");
 
 	res.status(200).send({
-		status:  'OK',
+		status: "OK",
 		version: {
-			major:    parseInt(version.shift(), 10),
-			minor:    parseInt(version.shift(), 10),
-			revision: parseInt(version.shift(), 10)
-		}
+			major: Number.parseInt(version.shift(), 10),
+			minor: Number.parseInt(version.shift(), 10),
+			revision: Number.parseInt(version.shift(), 10),
+		},
 	});
 });
 
-router.use('/schema', require('./schema'));
-router.use('/tokens', require('./tokens'));
-router.use('/users', require('./users'));
-router.use('/audit-log', require('./audit-log'));
-router.use('/reports', require('./reports'));
-router.use('/settings', require('./settings'));
-router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
-router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
-router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
-router.use('/nginx/streams', require('./nginx/streams'));
-router.use('/nginx/access-lists', require('./nginx/access_lists'));
-router.use('/nginx/certificates', require('./nginx/certificates'));
+router.use("/schema", schemaRoutes);
+router.use("/tokens", tokensRoutes);
+router.use("/users", usersRoutes);
+router.use("/audit-log", auditLogRoutes);
+router.use("/reports", reportsRoutes);
+router.use("/settings", settingsRoutes);
+router.use("/nginx/proxy-hosts", proxyHostsRoutes);
+router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
+router.use("/nginx/dead-hosts", deadHostsRoutes);
+router.use("/nginx/streams", streamsRoutes);
+router.use("/nginx/access-lists", accessListsRoutes);
+router.use("/nginx/certificates", certificatesHostsRoutes);
 
 /**
  * API 404 for all other routes
  *
  * ALL /api/*
  */
-router.all(/(.+)/, function (req, _, next) {
-	req.params.page = req.params['0'];
-	next(new error.ItemNotFoundError(req.params.page));
+router.all(/(.+)/, (req, _, next) => {
+	req.params.page = req.params["0"];
+	next(new errs.ItemNotFoundError(req.params.page));
 });
 
-module.exports = router;
+export default router;
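
The with { type: "json" } import attribute used for package.json above needs a reasonably recent Node release; on runtimes that lack it, the same read can be done through createRequire. A small fallback sketch:

	import { createRequire } from "node:module";

	const require = createRequire(import.meta.url);
	const pjson = require("../package.json");
	console.log(pjson.version);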

+ 58 - 56
backend/routes/nginx/access_lists.js

@@ -1,22 +1,22 @@
-const express            = require('express');
-const validator          = require('../../lib/validator');
-const jwtdecode          = require('../../lib/express/jwt-decode');
-const apiValidator       = require('../../lib/validator/api');
-const internalAccessList = require('../../internal/access-list');
-const schema             = require('../../schema');
+import express from "express";
+import internalAccessList from "../../internal/access-list.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/access-lists
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -27,26 +27,28 @@ router
 	 * Retrieve all access-lists
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalAccessList.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -57,13 +59,12 @@ router
 	 * Create a new access-list
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/access-lists", "post"), req.body)
 			.then((payload) => {
 				return internalAccessList.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -74,7 +75,7 @@ router
  * /api/nginx/access-lists/123
  */
 router
-	.route('/:list_id')
+	.route("/:list_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -86,30 +87,32 @@ router
 	 * Retrieve a specific access-list
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['list_id'],
-			additionalProperties: false,
-			properties:           {
-				list_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["list_id"],
+				additionalProperties: false,
+				properties: {
+					list_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			list_id: req.params.list_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				list_id: req.params.list_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalAccessList.get(res.locals.access, {
-					id:     parseInt(data.list_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.list_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -120,14 +123,13 @@ router
 	 * Update an existing access-list
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/access-lists/{listID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/nginx/access-lists/{listID}", "put"), req.body)
 			.then((payload) => {
-				payload.id = parseInt(req.params.list_id, 10);
+				payload.id = Number.parseInt(req.params.list_id, 10);
 				return internalAccessList.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -138,12 +140,12 @@ router
 	 * Delete an existing access-list
 	 */
 	.delete((req, res, next) => {
-		internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
+		internalAccessList
+			.delete(res.locals.access, { id: Number.parseInt(req.params.list_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 86 - 85
backend/routes/nginx/certificates.js

@@ -1,22 +1,22 @@
-const express             = require('express');
-const error               = require('../../lib/error');
-const validator           = require('../../lib/validator');
-const jwtdecode           = require('../../lib/express/jwt-decode');
-const apiValidator        = require('../../lib/validator/api');
-const internalCertificate = require('../../internal/certificate');
-const schema              = require('../../schema');
+import express from "express";
+import internalCertificate from "../../internal/certificate.js";
+import errs from "../../lib/error.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
 const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/certificates
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -28,26 +28,28 @@ router
 	 * Retrieve all certificates
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalCertificate.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -58,14 +60,13 @@ router
 	 * Create a new certificate
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/certificates', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/certificates", "post"), req.body)
 			.then((payload) => {
 				req.setTimeout(900000); // 15 minutes timeout
 				return internalCertificate.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -76,7 +77,7 @@ router
  * /api/nginx/certificates/test-http
  */
 router
-	.route('/test-http')
+	.route("/test-http")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -89,14 +90,14 @@ router
 	 */
 	.get((req, res, next) => {
 		if (req.query.domains === undefined) {
-			next(new error.ValidationError('Domains are required as query parameters'));
+			next(new errs.ValidationError("Domains are required as query parameters"));
 			return;
 		}
 
-		internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
+		internalCertificate
+			.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -107,7 +108,7 @@ router
  * /api/nginx/certificates/123
  */
 router
-	.route('/:certificate_id')
+	.route("/:certificate_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -119,30 +120,32 @@ router
 	 * Retrieve a specific certificate
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['certificate_id'],
-			additionalProperties: false,
-			properties:           {
-				certificate_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["certificate_id"],
+				additionalProperties: false,
+				properties: {
+					certificate_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			certificate_id: req.params.certificate_id,
-			expand:         (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				certificate_id: req.params.certificate_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalCertificate.get(res.locals.access, {
-					id:     parseInt(data.certificate_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.certificate_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -153,10 +156,10 @@ router
 	 * Delete an existing certificate
 	 */
 	.delete((req, res, next) => {
-		internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
+		internalCertificate
+			.delete(res.locals.access, { id: Number.parseInt(req.params.certificate_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -167,7 +170,7 @@ router
  * /api/nginx/certificates/123/upload
  */
 router
-	.route('/:certificate_id/upload')
+	.route("/:certificate_id/upload")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -180,16 +183,15 @@ router
 	 */
 	.post((req, res, next) => {
 		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
+			res.status(400).send({ error: "No files were uploaded" });
 		} else {
-			internalCertificate.upload(res.locals.access, {
-				id:    parseInt(req.params.certificate_id, 10),
-				files: req.files
-			})
+			internalCertificate
+				.upload(res.locals.access, {
+					id: Number.parseInt(req.params.certificate_id, 10),
+					files: req.files,
+				})
 				.then((result) => {
-					res.status(200)
-						.send(result);
+					res.status(200).send(result);
 				})
 				.catch(next);
 		}
@@ -201,7 +203,7 @@ router
  * /api/nginx/certificates/123/renew
  */
 router
-	.route('/:certificate_id/renew')
+	.route("/:certificate_id/renew")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -214,12 +216,12 @@ router
 	 */
 	.post((req, res, next) => {
 		req.setTimeout(900000); // 15 minutes timeout
-		internalCertificate.renew(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
+		internalCertificate
+			.renew(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -230,7 +232,7 @@ router
  * /api/nginx/certificates/123/download
  */
 router
-	.route('/:certificate_id/download')
+	.route("/:certificate_id/download")
 	.options((_req, res) => {
 		res.sendStatus(204);
 	})
@@ -242,12 +244,12 @@ router
 	 * Download certificate
 	 */
 	.get((req, res, next) => {
-		internalCertificate.download(res.locals.access, {
-			id: parseInt(req.params.certificate_id, 10)
-		})
+		internalCertificate
+			.download(res.locals.access, {
+				id: Number.parseInt(req.params.certificate_id, 10),
+			})
 			.then((result) => {
-				res.status(200)
-					.download(result.fileName);
+				res.status(200).download(result.fileName);
 			})
 			.catch(next);
 	});
@@ -258,7 +260,7 @@ router
  * /api/nginx/certificates/validate
  */
 router
-	.route('/validate')
+	.route("/validate")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -271,18 +273,17 @@ router
 	 */
 	.post((req, res, next) => {
 		if (!req.files) {
-			res.status(400)
-				.send({error: 'No files were uploaded'});
+			res.status(400).send({ error: "No files were uploaded" });
 		} else {
-			internalCertificate.validate({
-				files: req.files
-			})
+			internalCertificate
+				.validate({
+					files: req.files,
+				})
 				.then((result) => {
-					res.status(200)
-						.send(result);
+					res.status(200).send(result);
 				})
 				.catch(next);
 		}
 	});
 
-module.exports = router;
+export default router;

+ 66 - 64
backend/routes/nginx/dead_hosts.js

@@ -1,21 +1,21 @@
-const express          = require('express');
-const validator        = require('../../lib/validator');
-const jwtdecode        = require('../../lib/express/jwt-decode');
-const apiValidator     = require('../../lib/validator/api');
-const internalDeadHost = require('../../internal/dead-host');
-const schema           = require('../../schema');
+import express from "express";
+import internalDeadHost from "../../internal/dead-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/dead-hosts
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -27,26 +27,28 @@ router
 	 * Retrieve all dead-hosts
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -57,13 +59,12 @@ router
 	 * Create a new dead-host
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/dead-hosts", "post"), req.body)
 			.then((payload) => {
 				return internalDeadHost.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -74,8 +75,8 @@ router
  * /api/nginx/dead-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -86,30 +87,32 @@ router
 	 * Retrieve a specific dead-host
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["host_id"],
+				additionalProperties: false,
+				properties: {
+					host_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				host_id: req.params.host_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalDeadHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.host_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -120,14 +123,13 @@ router
 	 * Update an existing dead-host
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/dead-hosts/{hostID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/nginx/dead-hosts/{hostID}", "put"), req.body)
 			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
+				payload.id = Number.parseInt(req.params.host_id, 10);
 				return internalDeadHost.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -138,10 +140,10 @@ router
 	 * Delete an existing dead-host
 	 */
 	.delete((req, res, next) => {
-		internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalDeadHost
+			.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -152,7 +154,7 @@ router
  * /api/nginx/dead-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -162,10 +164,10 @@ router
 	 * POST /api/nginx/dead-hosts/123/enable
 	 */
 	.post((req, res, next) => {
-		internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalDeadHost
+			.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -176,7 +178,7 @@ router
  * /api/nginx/dead-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -186,12 +188,12 @@ router
 	 * POST /api/nginx/dead-hosts/123/disable
 	 */
 	.post((req, res, next) => {
-		internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalDeadHost
+			.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 67 - 65
backend/routes/nginx/proxy_hosts.js

@@ -1,22 +1,22 @@
-const express           = require('express');
-const validator         = require('../../lib/validator');
-const jwtdecode         = require('../../lib/express/jwt-decode');
-const apiValidator      = require('../../lib/validator/api');
-const internalProxyHost = require('../../internal/proxy-host');
-const schema            = require('../../schema');
+import express from "express";
+import internalProxyHost from "../../internal/proxy-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/proxy-hosts
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -27,26 +27,28 @@ router
 	 * Retrieve all proxy-hosts
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -57,13 +59,12 @@ router
 	 * Create a new proxy-host
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/proxy-hosts", "post"), req.body)
 			.then((payload) => {
 				return internalProxyHost.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -74,8 +75,8 @@ router
  * /api/nginx/proxy-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -86,30 +87,32 @@ router
 	 * Retrieve a specific proxy-host
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["host_id"],
+				additionalProperties: false,
+				properties: {
+					host_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				host_id: req.params.host_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalProxyHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.host_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -120,14 +123,13 @@ router
 	 * Update an existing proxy-host
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/proxy-hosts/{hostID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/nginx/proxy-hosts/{hostID}", "put"), req.body)
 			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
+				payload.id = Number.parseInt(req.params.host_id, 10);
 				return internalProxyHost.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -138,10 +140,10 @@ router
 	 * Delete an existing proxy-host
 	 */
 	.delete((req, res, next) => {
-		internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalProxyHost
+			.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -152,7 +154,7 @@ router
  * /api/nginx/proxy-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -162,10 +164,10 @@ router
 	 * POST /api/nginx/proxy-hosts/123/enable
 	 */
 	.post((req, res, next) => {
-		internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalProxyHost
+			.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -176,7 +178,7 @@ router
  * /api/nginx/proxy-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -186,12 +188,12 @@ router
 	 * POST /api/nginx/proxy-hosts/123/disable
 	 */
 	.post((req, res, next) => {
-		internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalProxyHost
+			.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 69 - 67
backend/routes/nginx/redirection_hosts.js

@@ -1,22 +1,22 @@
-const express                 = require('express');
-const validator               = require('../../lib/validator');
-const jwtdecode               = require('../../lib/express/jwt-decode');
-const apiValidator            = require('../../lib/validator/api');
-const internalRedirectionHost = require('../../internal/redirection-host');
-const schema                  = require('../../schema');
+import express from "express";
+import internalRedirectionHost from "../../internal/redirection-host.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/redirection-hosts
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -27,26 +27,28 @@ router
 	 * Retrieve all redirection-hosts
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -57,13 +59,12 @@ router
 	 * Create a new redirection-host
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/redirection-hosts", "post"), req.body)
 			.then((payload) => {
 				return internalRedirectionHost.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -74,8 +75,8 @@ router
  * /api/nginx/redirection-hosts/123
  */
 router
-	.route('/:host_id')
-	.options((req, res) => {
+	.route("/:host_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -86,30 +87,32 @@ router
 	 * Retrieve a specific redirection-host
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['host_id'],
-			additionalProperties: false,
-			properties:           {
-				host_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["host_id"],
+				additionalProperties: false,
+				properties: {
+					host_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			host_id: req.params.host_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				host_id: req.params.host_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalRedirectionHost.get(res.locals.access, {
-					id:     parseInt(data.host_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.host_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -120,14 +123,13 @@ router
 	 * Update an existing redirection-host
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/redirection-hosts/{hostID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/nginx/redirection-hosts/{hostID}", "put"), req.body)
 			.then((payload) => {
-				payload.id = parseInt(req.params.host_id, 10);
+				payload.id = Number.parseInt(req.params.host_id, 10);
 				return internalRedirectionHost.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -138,10 +140,10 @@ router
 	 * Delete an existing redirection-host
 	 */
 	.delete((req, res, next) => {
-		internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalRedirectionHost
+			.delete(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -152,8 +154,8 @@ router
  * /api/nginx/redirection-hosts/123/enable
  */
 router
-	.route('/:host_id/enable')
-	.options((req, res) => {
+	.route("/:host_id/enable")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -162,10 +164,10 @@ router
 	 * POST /api/nginx/redirection-hosts/123/enable
 	 */
 	.post((req, res, next) => {
-		internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalRedirectionHost
+			.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -176,8 +178,8 @@ router
  * /api/nginx/redirection-hosts/123/disable
  */
 router
-	.route('/:host_id/disable')
-	.options((req, res) => {
+	.route("/:host_id/disable")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -186,12 +188,12 @@ router
 	 * POST /api/nginx/redirection-hosts/123/disable
 	 */
 	.post((req, res, next) => {
-		internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalRedirectionHost
+			.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 67 - 65
backend/routes/nginx/streams.js

@@ -1,22 +1,22 @@
-const express        = require('express');
-const validator      = require('../../lib/validator');
-const jwtdecode      = require('../../lib/express/jwt-decode');
-const apiValidator   = require('../../lib/validator/api');
-const internalStream = require('../../internal/stream');
-const schema         = require('../../schema');
+import express from "express";
+import internalStream from "../../internal/stream.js";
+import jwtdecode from "../../lib/express/jwt-decode.js";
+import apiValidator from "../../lib/validator/api.js";
+import validator from "../../lib/validator/index.js";
+import { getValidationSchema } from "../../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/nginx/streams
  */
 router
-	.route('/')
-	.options((req, res) => {
+	.route("/")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -27,26 +27,28 @@ router
 	 * Retrieve all streams
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalStream.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	})
@@ -57,13 +59,12 @@ router
 	 * Create a new stream
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams', 'post'), req.body)
+		apiValidator(getValidationSchema("/nginx/streams", "post"), req.body)
 			.then((payload) => {
 				return internalStream.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -74,8 +75,8 @@ router
  * /api/nginx/streams/123
  */
 router
-	.route('/:stream_id')
-	.options((req, res) => {
+	.route("/:stream_id")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
@@ -86,30 +87,32 @@ router
 	 * Retrieve a specific stream
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['stream_id'],
-			additionalProperties: false,
-			properties:           {
-				stream_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["stream_id"],
+				additionalProperties: false,
+				properties: {
+					stream_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			stream_id: req.params.stream_id,
-			expand:    (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				stream_id: req.params.stream_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalStream.get(res.locals.access, {
-					id:     parseInt(data.stream_id, 10),
-					expand: data.expand
+					id: Number.parseInt(data.stream_id, 10),
+					expand: data.expand,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -120,14 +123,13 @@ router
 	 * Update an existing stream
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/nginx/streams/{streamID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/nginx/streams/{streamID}", "put"), req.body)
 			.then((payload) => {
-				payload.id = parseInt(req.params.stream_id, 10);
+				payload.id = Number.parseInt(req.params.stream_id, 10);
 				return internalStream.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -138,10 +140,10 @@ router
 	 * Delete an existing stream
 	 */
 	.delete((req, res, next) => {
-		internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
+		internalStream
+			.delete(res.locals.access, { id: Number.parseInt(req.params.stream_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -152,7 +154,7 @@ router
  * /api/nginx/streams/123/enable
  */
 router
-	.route('/:host_id/enable')
+	.route("/:host_id/enable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -162,10 +164,10 @@ router
 	 * POST /api/nginx/streams/123/enable
 	 */
 	.post((req, res, next) => {
-		internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalStream
+			.enable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -176,7 +178,7 @@ router
  * /api/nginx/streams/123/disable
  */
 router
-	.route('/:host_id/disable')
+	.route("/:host_id/disable")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -186,12 +188,12 @@ router
 	 * POST /api/nginx/streams/123/disable
 	 */
 	.post((req, res, next) => {
-		internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
+		internalStream
+			.disable(res.locals.access, { id: Number.parseInt(req.params.host_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 11 - 11
backend/routes/reports.js

@@ -1,15 +1,15 @@
-const express        = require('express');
-const jwtdecode      = require('../lib/express/jwt-decode');
-const internalReport = require('../internal/report');
+import express from "express";
+import internalReport from "../internal/report.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/hosts')
+	.route("/hosts")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -18,12 +18,12 @@ router
 	 * GET /reports/hosts
 	 */
 	.get(jwtdecode(), (_, res, next) => {
-		internalReport.getHostsReport(res.locals.access)
+		internalReport
+			.getHostsReport(res.locals.access)
 			.then((data) => {
-				res.status(200)
-					.send(data);
+				res.status(200).send(data);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 14 - 14
backend/routes/schema.js

@@ -1,15 +1,15 @@
-const express = require('express');
-const schema  = require('../schema');
-const PACKAGE = require('../package.json');
+import express from "express";
+import PACKAGE from "../package.json" with { type: "json" };
+import { getCompiledSchema } from "../schema/index.js";
 
 const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -18,21 +18,21 @@ router
 	 * GET /schema
 	 */
 	.get(async (req, res) => {
-		let swaggerJSON = await schema.getCompiledSchema();
+		const swaggerJSON = await getCompiledSchema();
 
 		let proto = req.protocol;
-		if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
-			proto = req.headers['x-forwarded-proto'];
+		if (typeof req.headers["x-forwarded-proto"] !== "undefined" && req.headers["x-forwarded-proto"]) {
+			proto = req.headers["x-forwarded-proto"];
 		}
 
-		let origin = proto + '://' + req.hostname;
-		if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
+		let origin = `${proto}://${req.hostname}`;
+		if (typeof req.headers.origin !== "undefined" && req.headers.origin) {
 			origin = req.headers.origin;
 		}
 
-		swaggerJSON.info.version   = PACKAGE.version;
-		swaggerJSON.servers[0].url = origin + '/api';
+		swaggerJSON.info.version = PACKAGE.version;
+		swaggerJSON.servers[0].url = `${origin}/api`;
 		res.status(200).send(swaggerJSON);
 	});
 
-module.exports = router;
+export default router;
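
The package.json import in this file uses a JSON import attribute (with { type: "json" }), which needs a Node release new enough to support the with keyword; older releases only understood the earlier assert spelling, so the supported runtime version is an assumption here. A standalone sketch of the same pattern, assuming a package.json sits next to the script:

	// read-version.mjs — illustrative only; assumes ./package.json exists beside this file
	import pkg from "./package.json" with { type: "json" };

	// The parsed object is available synchronously, much as require("./package.json") was.
	console.log(pkg.version);
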

+ 34 - 33
backend/routes/settings.js

@@ -1,21 +1,21 @@
-const express         = require('express');
-const validator       = require('../lib/validator');
-const jwtdecode       = require('../lib/express/jwt-decode');
-const apiValidator    = require('../lib/validator/api');
-const internalSetting = require('../internal/setting');
-const schema          = require('../schema');
+import express from "express";
+import internalSetting from "../internal/setting.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/settings
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -27,10 +27,10 @@ router
 	 * Retrieve all settings
 	 */
 	.get((_, res, next) => {
-		internalSetting.getAll(res.locals.access)
+		internalSetting
+			.getAll(res.locals.access)
 			.then((rows) => {
-				res.status(200)
-					.send(rows);
+				res.status(200).send(rows);
 			})
 			.catch(next);
 	});
@@ -41,7 +41,7 @@ router
  * /api/settings/something
  */
 router
-	.route('/:setting_id')
+	.route("/:setting_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -53,26 +53,28 @@ router
 	 * Retrieve a specific setting
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['setting_id'],
-			additionalProperties: false,
-			properties:           {
-				setting_id: {
-					type:      'string',
-					minLength: 1
-				}
-			}
-		}, {
-			setting_id: req.params.setting_id
-		})
+		validator(
+			{
+				required: ["setting_id"],
+				additionalProperties: false,
+				properties: {
+					setting_id: {
+						type: "string",
+						minLength: 1,
+					},
+				},
+			},
+			{
+				setting_id: req.params.setting_id,
+			},
+		)
 			.then((data) => {
 				return internalSetting.get(res.locals.access, {
-					id: data.setting_id
+					id: data.setting_id,
 				});
 			})
 			.then((row) => {
-				res.status(200)
-					.send(row);
+				res.status(200).send(row);
 			})
 			.catch(next);
 	})
@@ -83,16 +85,15 @@ router
 	 * Update an existing setting
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/settings/{settingID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/settings/{settingID}", "put"), req.body)
 			.then((payload) => {
 				payload.id = req.params.setting_id;
 				return internalSetting.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 18 - 19
backend/routes/tokens.js

@@ -1,17 +1,17 @@
-const express       = require('express');
-const jwtdecode     = require('../lib/express/jwt-decode');
-const apiValidator  = require('../lib/validator/api');
-const internalToken = require('../internal/token');
-const schema        = require('../schema');
+import express from "express";
+import internalToken from "../internal/token.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import apiValidator from "../lib/validator/api.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -24,13 +24,13 @@ router
 	 * for services like Job board and Worker.
 	 */
 	.get(jwtdecode(), (req, res, next) => {
-		internalToken.getFreshToken(res.locals.access, {
-			expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
-			scope:  (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
-		})
+		internalToken
+			.getFreshToken(res.locals.access, {
+				expiry: typeof req.query.expiry !== "undefined" ? req.query.expiry : null,
+				scope: typeof req.query.scope !== "undefined" ? req.query.scope : null,
+			})
 			.then((data) => {
-				res.status(200)
-					.send(data);
+				res.status(200).send(data);
 			})
 			.catch(next);
 	})
@@ -41,13 +41,12 @@ router
 	 * Create a new Token
 	 */
 	.post(async (req, res, next) => {
-		apiValidator(schema.getValidationSchema('/tokens', 'post'), req.body)
+		apiValidator(getValidationSchema("/tokens", "post"), req.body)
 			.then(internalToken.getTokenFromEmail)
 			.then((data) => {
-				res.status(200)
-					.send(data);
+				res.status(200).send(data);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 69 - 69
backend/routes/users.js

@@ -1,22 +1,22 @@
-const express      = require('express');
-const validator    = require('../lib/validator');
-const jwtdecode    = require('../lib/express/jwt-decode');
-const userIdFromMe = require('../lib/express/user-id-from-me');
-const internalUser = require('../internal/user');
-const apiValidator = require('../lib/validator/api');
-const schema       = require('../schema');
+import express from "express";
+import internalUser from "../internal/user.js";
+import jwtdecode from "../lib/express/jwt-decode.js";
+import userIdFromMe from "../lib/express/user-id-from-me.js";
+import apiValidator from "../lib/validator/api.js";
+import validator from "../lib/validator/index.js";
+import { getValidationSchema } from "../schema/index.js";
 
-let router = express.Router({
+const router = express.Router({
 	caseSensitive: true,
-	strict:        true,
-	mergeParams:   true
+	strict: true,
+	mergeParams: true,
 });
 
 /**
  * /api/users
  */
 router
-	.route('/')
+	.route("/")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -28,26 +28,28 @@ router
 	 * Retrieve all users
 	 */
 	.get((req, res, next) => {
-		validator({
-			additionalProperties: false,
-			properties:           {
-				expand: {
-					$ref: 'common#/properties/expand'
+		validator(
+			{
+				additionalProperties: false,
+				properties: {
+					expand: {
+						$ref: "common#/properties/expand",
+					},
+					query: {
+						$ref: "common#/properties/query",
+					},
 				},
-				query: {
-					$ref: 'common#/properties/query'
-				}
-			}
-		}, {
-			expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
-			query:  (typeof req.query.query === 'string' ? req.query.query : null)
-		})
+			},
+			{
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+				query: typeof req.query.query === "string" ? req.query.query : null,
+			},
+		)
 			.then((data) => {
 				return internalUser.getAll(res.locals.access, data.expand, data.query);
 			})
 			.then((users) => {
-				res.status(200)
-					.send(users);
+				res.status(200).send(users);
 			})
 			.catch((err) => {
 				console.log(err);
@@ -62,13 +64,12 @@ router
 	 * Create a new User
 	 */
 	.post((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users', 'post'), req.body)
+		apiValidator(getValidationSchema("/users", "post"), req.body)
 			.then((payload) => {
 				return internalUser.create(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(201)
-					.send(result);
+				res.status(201).send(result);
 			})
 			.catch(next);
 	});
@@ -79,7 +80,7 @@ router
  * /api/users/123
  */
 router
-	.route('/:user_id')
+	.route("/:user_id")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -92,31 +93,33 @@ router
 	 * Retrieve a specific user
 	 */
 	.get((req, res, next) => {
-		validator({
-			required:             ['user_id'],
-			additionalProperties: false,
-			properties:           {
-				user_id: {
-					$ref: 'common#/properties/id'
+		validator(
+			{
+				required: ["user_id"],
+				additionalProperties: false,
+				properties: {
+					user_id: {
+						$ref: "common#/properties/id",
+					},
+					expand: {
+						$ref: "common#/properties/expand",
+					},
 				},
-				expand: {
-					$ref: 'common#/properties/expand'
-				}
-			}
-		}, {
-			user_id: req.params.user_id,
-			expand:  (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
-		})
+			},
+			{
+				user_id: req.params.user_id,
+				expand: typeof req.query.expand === "string" ? req.query.expand.split(",") : null,
+			},
+		)
 			.then((data) => {
 				return internalUser.get(res.locals.access, {
-					id:     data.user_id,
+					id: data.user_id,
 					expand: data.expand,
-					omit:   internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
+					omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id),
 				});
 			})
 			.then((user) => {
-				res.status(200)
-					.send(user);
+				res.status(200).send(user);
 			})
 			.catch((err) => {
 				console.log(err);
@@ -130,14 +133,13 @@ router
 	 * Update an existing user
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}', 'put'), req.body)
+		apiValidator(getValidationSchema("/users/{userID}", "put"), req.body)
 			.then((payload) => {
 				payload.id = req.params.user_id;
 				return internalUser.update(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	})
@@ -148,10 +150,10 @@ router
 	 * Delete an existing user
 	 */
 	.delete((req, res, next) => {
-		internalUser.delete(res.locals.access, {id: req.params.user_id})
+		internalUser
+			.delete(res.locals.access, { id: req.params.user_id })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -162,8 +164,8 @@ router
  * /api/users/123/auth
  */
 router
-	.route('/:user_id/auth')
-	.options((req, res) => {
+	.route("/:user_id/auth")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -175,14 +177,13 @@ router
 	 * Update password for a user
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/auth', 'put'), req.body)
+		apiValidator(getValidationSchema("/users/{userID}/auth", "put"), req.body)
 			.then((payload) => {
 				payload.id = req.params.user_id;
 				return internalUser.setPassword(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -193,8 +194,8 @@ router
  * /api/users/123/permissions
  */
 router
-	.route('/:user_id/permissions')
-	.options((req, res) => {
+	.route("/:user_id/permissions")
+	.options((_, res) => {
 		res.sendStatus(204);
 	})
 	.all(jwtdecode())
@@ -206,14 +207,13 @@ router
 	 * Set some or all permissions for a user
 	 */
 	.put((req, res, next) => {
-		apiValidator(schema.getValidationSchema('/users/{userID}/permissions', 'put'), req.body)
+		apiValidator(getValidationSchema("/users/{userID}/permissions", "put"), req.body)
 			.then((payload) => {
 				payload.id = req.params.user_id;
 				return internalUser.setPermissions(res.locals.access, payload);
 			})
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
@@ -224,7 +224,7 @@ router
  * /api/users/123/login
  */
 router
-	.route('/:user_id/login')
+	.route("/:user_id/login")
 	.options((_, res) => {
 		res.sendStatus(204);
 	})
@@ -236,12 +236,12 @@ router
 	 * Log in as a user
 	 */
 	.post((req, res, next) => {
-		internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
+		internalUser
+			.loginAs(res.locals.access, { id: Number.parseInt(req.params.user_id, 10) })
 			.then((result) => {
-				res.status(200)
-					.send(result);
+				res.status(200).send(result);
 			})
 			.catch(next);
 	});
 
-module.exports = router;
+export default router;

+ 40 - 35
backend/schema/index.js

@@ -1,41 +1,46 @@
-const refParser = require('@apidevtools/json-schema-ref-parser');
+import { dirname } from "node:path";
+import { fileURLToPath } from "node:url";
+import $RefParser from "@apidevtools/json-schema-ref-parser";
 
-let compiledSchema = null;
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
 
-module.exports = {
+let compiledSchema = null;
 
-	/**
-	 * Compiles the schema, by dereferencing it, only once
-	 * and returns the memory cached value
-	 */
-	getCompiledSchema: async () => {
-		if (compiledSchema === null) {
-			compiledSchema = await refParser.dereference(__dirname + '/swagger.json', {
-				mutateInputSchema: false,
-			});
-		}
-		return compiledSchema;
-	},
+/**
+ * Compiles the schema, by dereferencing it, only once
+ * and returns the memory cached value
+ */
+const getCompiledSchema = async () => {
+	if (compiledSchema === null) {
+		compiledSchema = await $RefParser.dereference(`${__dirname}/swagger.json`, {
+			mutateInputSchema: false,
+		});
+	}
+	return compiledSchema;
+};
 
-	/**
-	 * Scans the schema for the validation schema for the given path and method
-	 * and returns it.
-	 *
-	 * @param {string} path
-	 * @param {string} method
-	 * @returns string|null
-	 */
-	getValidationSchema: (path, method) => {
-		if (compiledSchema !== null &&
-			typeof compiledSchema.paths[path] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'] !== 'undefined' &&
-			typeof compiledSchema.paths[path][method].requestBody.content['application/json'].schema !== 'undefined'
-		) {
-			return compiledSchema.paths[path][method].requestBody.content['application/json'].schema;
-		}
-		return null;
+/**
+ * Scans the schema for the validation schema for the given path and method
+ * and returns it.
+ *
+ * @param {string} path
+ * @param {string} method
+ * @returns string|null
+ */
+const getValidationSchema = (path, method) => {
+	if (
+		compiledSchema !== null &&
+		typeof compiledSchema.paths[path] !== "undefined" &&
+		typeof compiledSchema.paths[path][method] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"] !== "undefined" &&
+		typeof compiledSchema.paths[path][method].requestBody.content["application/json"].schema !== "undefined"
+	) {
+		return compiledSchema.paths[path][method].requestBody.content["application/json"].schema;
 	}
+	return null;
 };
+
+export { getCompiledSchema, getValidationSchema };
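
ES modules do not provide __dirname, so the file rebuilds it from import.meta.url before resolving swagger.json. A small sketch of that shim follows; the import.meta.dirname shortcut noted in the comment exists only on newer Node releases, which is an assumption about the runtime rather than something this commit relies on:

	import { dirname, join } from "node:path";
	import { fileURLToPath } from "node:url";

	// Portable reconstruction of __dirname inside an ES module:
	const moduleDir = dirname(fileURLToPath(import.meta.url));

	// Newer Node releases expose the same value directly:
	// const moduleDir = import.meta.dirname;

	const swaggerPath = join(moduleDir, "swagger.json");
	console.log(swaggerPath);
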

+ 5 - 5
backend/scripts/install-certbot-plugins

@@ -10,10 +10,10 @@
 //    docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins"
 //
 
-const dnsPlugins = require('../global/certbot-dns-plugins.json');
-const certbot    = require('../lib/certbot');
-const logger     = require('../logger').certbot;
-const batchflow  = require('batchflow');
+import dnsPlugins from "../global/certbot-dns-plugins.json" with { type: "json" };
+import { installPlugin } from "../lib/certbot.js";
+import { certbot as logger } from "../logger.js";
+import batchflow from "batchflow";
 
 let hasErrors      = false;
 let failingPlugins = [];
@@ -25,7 +25,7 @@ if (process.argv.length > 2) {
 
 batchflow(pluginKeys).sequential()
 	.each((i, pluginKey, next) => {
-		certbot.installPlugin(pluginKey)
+		installPlugin(pluginKey)
 			.then(() => {
 				next();
 			})

+ 62 - 66
backend/setup.js

@@ -1,12 +1,12 @@
-const config              = require('./lib/config');
-const logger              = require('./logger').setup;
-const certificateModel    = require('./models/certificate');
-const userModel           = require('./models/user');
-const userPermissionModel = require('./models/user_permission');
-const utils               = require('./lib/utils');
-const authModel           = require('./models/auth');
-const settingModel        = require('./models/setting');
-const certbot             = require('./lib/certbot');
+import { installPlugins } from "./lib/certbot.js";
+import utils from "./lib/utils.js";
+import { setup as logger } from "./logger.js";
+import authModel from "./models/auth.js";
+import certificateModel from "./models/certificate.js";
+import settingModel from "./models/setting.js";
+import userModel from "./models/user.js";
+import userPermissionModel from "./models/user_permission.js";
+
 /**
  * Creates a default admin users if one doesn't already exist in the database
  *
@@ -15,24 +15,24 @@ const certbot             = require('./lib/certbot');
 const setupDefaultUser = () => {
 	return userModel
 		.query()
-		.select('id', )
-		.where('is_deleted', 0)
+		.select("id")
+		.where("is_deleted", 0)
 		.first()
 		.then((row) => {
 			if (!row || !row.id) {
 				// Create a new user and set password
 				const email    = (process.env.INITIAL_ADMIN_EMAIL || '[email protected]').toLowerCase();
-				const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
+				const password = process.env.INITIAL_ADMIN_PASSWORD || "changeme";
 
 				logger.info(`Creating a new user: ${email} with password: ${password}`);
 
 				const data = {
 					is_deleted: 0,
-					email:      email,
-					name:       'Administrator',
-					nickname:   'Admin',
-					avatar:     '',
-					roles:      ['admin'],
+					email: email,
+					name: "Administrator",
+					nickname: "Admin",
+					avatar: "",
+					roles: ["admin"],
 				};
 
 				return userModel
@@ -43,29 +43,28 @@ const setupDefaultUser = () => {
 							.query()
 							.insert({
 								user_id: user.id,
-								type:    'password',
-								secret:  password,
-								meta:    {},
+								type: "password",
+								secret: password,
+								meta: {},
 							})
 							.then(() => {
 								return userPermissionModel.query().insert({
-									user_id:           user.id,
-									visibility:        'all',
-									proxy_hosts:       'manage',
-									redirection_hosts: 'manage',
-									dead_hosts:        'manage',
-									streams:           'manage',
-									access_lists:      'manage',
-									certificates:      'manage',
+									user_id: user.id,
+									visibility: "all",
+									proxy_hosts: "manage",
+									redirection_hosts: "manage",
+									dead_hosts: "manage",
+									streams: "manage",
+									access_lists: "manage",
+									certificates: "manage",
 								});
 							});
 					})
 					.then(() => {
-						logger.info('Initial admin setup completed');
+						logger.info("Initial admin setup completed");
 					});
-			} else if (config.debug()) {
-				logger.info('Admin user setup not required');
 			}
+			logger.debug("Admin user setup not required");
 		});
 };
 
@@ -77,27 +76,25 @@ const setupDefaultUser = () => {
 const setupDefaultSettings = () => {
 	return settingModel
 		.query()
-		.select('id')
-		.where({id: 'default-site'})
+		.select("id")
+		.where({ id: "default-site" })
 		.first()
 		.then((row) => {
 			if (!row || !row.id) {
 				settingModel
 					.query()
 					.insert({
-						id:          'default-site',
-						name:        'Default Site',
-						description: 'What to show when Nginx is hit with an unknown Host',
-						value:       'congratulations',
-						meta:        {},
+						id: "default-site",
+						name: "Default Site",
+						description: "What to show when Nginx is hit with an unknown Host",
+						value: "congratulations",
+						meta: {},
 					})
 					.then(() => {
-						logger.info('Default settings added');
+						logger.info("Default settings added");
 					});
 			}
-			if (config.debug()) {
-				logger.info('Default setting setup not required');
-			}
+			logger.debug("Default setting setup not required");
 		});
 };
 
@@ -109,11 +106,11 @@ const setupDefaultSettings = () => {
 const setupCertbotPlugins = () => {
 	return certificateModel
 		.query()
-		.where('is_deleted', 0)
-		.andWhere('provider', 'letsencrypt')
+		.where("is_deleted", 0)
+		.andWhere("provider", "letsencrypt")
 		.then((certificates) => {
-			if (certificates && certificates.length) {
-				const plugins  = [];
+			if (certificates?.length) {
+				const plugins = [];
 				const promises = [];
 
 				certificates.map((certificate) => {
@@ -125,26 +122,26 @@ const setupCertbotPlugins = () => {
 						// Make sure credentials file exists
 						const credentials_loc = `/etc/letsencrypt/credentials/credentials-${certificate.id}`;
 						// Escape single quotes and backslashes
-						const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
-						const credentials_cmd    = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
+						const escapedCredentials = certificate.meta.dns_provider_credentials
+							.replaceAll("'", "\\'")
+							.replaceAll("\\", "\\\\");
+						const credentials_cmd = `[ -f '${credentials_loc}' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo '${escapedCredentials}' > '${credentials_loc}' && chmod 600 '${credentials_loc}'; }`;
 						promises.push(utils.exec(credentials_cmd));
 					}
+					return true;
 				});
 
-				return certbot.installPlugins(plugins)
-					.then(() => {
-						if (promises.length) {
-							return Promise.all(promises)
-								.then(() => {
-									logger.info(`Added Certbot plugins ${plugins.join(', ')}`);
-								});
-						}
-					});
+				return installPlugins(plugins).then(() => {
+					if (promises.length) {
+						return Promise.all(promises).then(() => {
+							logger.info(`Added Certbot plugins ${plugins.join(", ")}`);
+						});
+					}
+				});
 			}
 		});
 };
 
-
 /**
  * Starts a timer to call run the logrotation binary every two days
  * @returns {Promise}
@@ -154,18 +151,17 @@ const setupLogrotation = () => {
 
 	const runLogrotate = async () => {
 		try {
-			await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager');
-			logger.info('Logrotate completed.');
-		} catch (e) { logger.warn(e); }
+			await utils.exec("logrotate /etc/logrotate.d/nginx-proxy-manager");
+			logger.info("Logrotate completed.");
+		} catch (e) {
+			logger.warn(e);
+		}
 	};
 
-	logger.info('Logrotate Timer initialized');
+	logger.info("Logrotate Timer initialized");
 	setInterval(runLogrotate, intervalTimeout);
 	// And do this now as well
 	return runLogrotate();
 };
 
-module.exports = () => setupDefaultUser()
-	.then(setupDefaultSettings)
-	.then(setupCertbotPlugins)
-	.then(setupLogrotation);
+export default () => setupDefaultUser().then(setupDefaultSettings).then(setupCertbotPlugins).then(setupLogrotation);
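
The new default export keeps the original promise chain. For reference only, an equivalent async/await form of that line, written against the functions already defined in this file and not something the commit itself introduces:

	// Equivalent shape, assuming the setup* functions above are in scope:
	export default async () => {
		await setupDefaultUser();
		await setupDefaultSettings();
		await setupCertbotPlugins();
		await setupLogrotation();
	};
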

+ 11 - 8
backend/validate-schema.js

@@ -1,16 +1,19 @@
-const SwaggerParser = require('@apidevtools/swagger-parser');
-const chalk         = require('chalk');
-const schema        = require('./schema');
-const log           = console.log;
+#!/usr/bin/node
 
-schema.getCompiledSchema().then(async (swaggerJSON) => {
+import SwaggerParser from "@apidevtools/swagger-parser";
+import chalk from "chalk";
+import { getCompiledSchema } from "./schema/index.js";
+
+const log = console.log;
+
+getCompiledSchema().then(async (swaggerJSON) => {
 	try {
 		const api = await SwaggerParser.validate(swaggerJSON);
-		console.log('API name: %s, Version: %s', api.info.title, api.info.version);
-		log(chalk.green('❯ Schema is valid'));
+		console.log("API name: %s, Version: %s", api.info.title, api.info.version);
+		log(chalk.green("❯ Schema is valid"));
 	} catch (e) {
 		console.error(e);
-		log(chalk.red('❯', e.message), '\n');
+		log(chalk.red("❯", e.message), "\n");
 		process.exit(1);
 	}
 });

+ 54 - 483
backend/yarn.lock

@@ -43,62 +43,65 @@
     ajv-draft-04 "^1.0.0"
     call-me-maybe "^1.0.1"
 
-"@eslint-community/eslint-utils@^4.2.0":
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.3.0.tgz#a556790523a351b4e47e9d385f47265eaaf9780a"
-  integrity sha512-v3oplH6FYCULtFuCeqyuTd9D2WKO937Dxdq+GmHOLL72TTRriLxz2VLlNfkZRsvj6PKnOPAtuT6dwrs/pA5DvA==
-  dependencies:
-    eslint-visitor-keys "^3.3.0"
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/biome/-/biome-2.2.0.tgz#823ba77363651f310c47909747c879791ebd15c9"
+  integrity sha512-3On3RSYLsX+n9KnoSgfoYlckYBoU6VRM22cw1gB4Y0OuUVSYd/O/2saOJMrA4HFfA1Ff0eacOvMN1yAAvHtzIw==
+  optionalDependencies:
+    "@biomejs/cli-darwin-arm64" "2.2.0"
+    "@biomejs/cli-darwin-x64" "2.2.0"
+    "@biomejs/cli-linux-arm64" "2.2.0"
+    "@biomejs/cli-linux-arm64-musl" "2.2.0"
+    "@biomejs/cli-linux-x64" "2.2.0"
+    "@biomejs/cli-linux-x64-musl" "2.2.0"
+    "@biomejs/cli-win32-arm64" "2.2.0"
+    "@biomejs/cli-win32-x64" "2.2.0"
+
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.2.0.tgz#1abf9508e7d0776871710687ddad36e692dce3bc"
+  integrity sha512-zKbwUUh+9uFmWfS8IFxmVD6XwqFcENjZvEyfOxHs1epjdH3wyyMQG80FGDsmauPwS2r5kXdEM0v/+dTIA9FXAg==
 
-"@eslint-community/regexpp@^4.4.0":
-  version "4.4.0"
-  resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.4.0.tgz#3e61c564fcd6b921cb789838631c5ee44df09403"
-  integrity sha512-A9983Q0LnDGdLPjxyXQ00sbV+K+O+ko2Dr+CZigbHWtX9pNfxlaBkMR8X1CztI73zuEyEBXTVjx7CE+/VSwDiQ==
+"@biomejs/[email protected].0":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.2.0.tgz#3a51aa569505fedd3a32bb914d608ec27d87f26d"
+  integrity sha512-+OmT4dsX2eTfhD5crUOPw3RPhaR+SKVspvGVmSdZ9y9O/AgL8pla6T4hOn1q+VAFBHuHhsdxDRJgFCSC7RaMOw==
 
-"@eslint/eslintrc@^2.0.1":
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.0.1.tgz#7888fe7ec8f21bc26d646dbd2c11cd776e21192d"
-  integrity sha512-eFRmABvW2E5Ho6f5fHLqgena46rOj7r7OKHYfLElqcBfGFHHpjBhivyi5+jOEQuSpdc/1phIZJlbC2te+tZNIw==
-  dependencies:
-    ajv "^6.12.4"
-    debug "^4.3.2"
-    espree "^9.5.0"
-    globals "^13.19.0"
-    ignore "^5.2.0"
-    import-fresh "^3.2.1"
-    js-yaml "^4.1.0"
-    minimatch "^3.1.2"
-    strip-json-comments "^3.1.1"
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.0.tgz#4d720930732a825b7a8c7cfe1741aec9e7d5ae1d"
+  integrity sha512-egKpOa+4FL9YO+SMUMLUvf543cprjevNc3CAgDNFLcjknuNMcZ0GLJYa3EGTCR2xIkIUJDVneBV3O9OcIlCEZQ==
+
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.2.0.tgz#d0a5c153ff9243b15600781947d70d6038226feb"
+  integrity sha512-6eoRdF2yW5FnW9Lpeivh7Mayhq0KDdaDMYOJnH9aT02KuSIX5V1HmWJCQQPwIQbhDh68Zrcpl8inRlTEan0SXw==
+
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.0.tgz#946095b0a444f395b2df9244153e1cd6b07404c0"
+  integrity sha512-I5J85yWwUWpgJyC1CcytNSGusu2p9HjDnOPAFG4Y515hwRD0jpR9sT9/T1cKHtuCvEQ/sBvx+6zhz9l9wEJGAg==
 
-"@eslint/[email protected]":
-  version "8.36.0"
-  resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.36.0.tgz#9837f768c03a1e4a30bd304a64fb8844f0e72efe"
-  integrity sha512-lxJ9R5ygVm8ZWgYdUweoq5ownDlJ4upvoWmO4eLxBYHdMo+vZ/Rx0EN6MbKWDJOSUGrqJy2Gt+Dyv/VKml0fjg==
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-linux-x64/-/cli-linux-x64-2.2.0.tgz#ae01e0a70c7cd9f842c77dfb4ebd425734667a34"
+  integrity sha512-5UmQx/OZAfJfi25zAnAGHUMuOd+LOsliIt119x2soA2gLggQYrVPA+2kMUxR6Mw5M1deUF/AWWP2qpxgH7Nyfw==
+
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.2.0.tgz#09a3988b9d4bab8b8b3a41b4de9560bf70943964"
+  integrity sha512-n9a1/f2CwIDmNMNkFs+JI0ZjFnMO0jdOyGNtihgUNFnlmd84yIYY2KMTBmMV58ZlVHjgmY5Y6E1hVTnSRieggA==
+
+"@biomejs/[email protected]":
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/@biomejs/cli-win32-x64/-/cli-win32-x64-2.2.0.tgz#5d2523b421d847b13fac146cf745436ea8a72b95"
+  integrity sha512-Nawu5nHjP/zPKTIryh2AavzTc/KEg4um/MxWdXW0A6P/RZOyIpa7+QSjeXwAwX/utJGaCoXRPWtF3m5U/bB3Ww==
 
 "@gar/promisify@^1.0.1":
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"
   integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==
 
-"@humanwhocodes/config-array@^0.11.8":
-  version "0.11.8"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.8.tgz#03595ac2075a4dc0f191cc2131de14fbd7d410b9"
-  integrity sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==
-  dependencies:
-    "@humanwhocodes/object-schema" "^1.2.1"
-    debug "^4.1.1"
-    minimatch "^3.0.5"
-
-"@humanwhocodes/module-importer@^1.0.1":
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c"
-  integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==
-
-"@humanwhocodes/object-schema@^1.2.1":
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
-  integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
-
 "@jsdevtools/ono@^7.1.3":
   version "7.1.3"
   resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
@@ -119,27 +122,6 @@
     semver "^7.3.5"
     tar "^6.1.11"
 
-"@nodelib/[email protected]":
-  version "2.1.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
-  integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
-  dependencies:
-    "@nodelib/fs.stat" "2.0.5"
-    run-parallel "^1.1.9"
-
-"@nodelib/[email protected]":
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
-  integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
-
-"@nodelib/fs.walk@^1.2.8":
-  version "1.2.8"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
-  integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
-  dependencies:
-    "@nodelib/fs.scandir" "2.1.5"
-    fastq "^1.6.0"
-
 "@npmcli/fs@^1.0.0":
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.1.1.tgz#72f719fe935e687c56a4faecf3c03d06ba593257"
@@ -196,16 +178,6 @@ accepts@~1.3.5, accepts@~1.3.8:
     mime-types "~2.1.34"
     negotiator "0.6.3"
 
-acorn-jsx@^5.3.2:
-  version "5.3.2"
-  resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
-  integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
-
-acorn@^8.8.0:
-  version "8.8.2"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a"
-  integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==
-
 agent-base@6, agent-base@^6.0.2:
   version "6.0.2"
   resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"
@@ -235,16 +207,6 @@ ajv-draft-04@^1.0.0:
   resolved "https://registry.yarnpkg.com/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz#3b64761b268ba0b9e668f0b41ba53fce0ad77fc8"
   integrity sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==
 
-ajv@^6.10.0, ajv@^6.12.4:
-  version "6.12.6"
-  resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
-  integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
-  dependencies:
-    fast-deep-equal "^3.1.1"
-    fast-json-stable-stringify "^2.0.0"
-    json-schema-traverse "^0.4.1"
-    uri-js "^4.2.2"
-
 ajv@^8.17.1, ajv@^8.6.3:
   version "8.17.1"
   resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6"
@@ -594,17 +556,12 @@ call-me-maybe@^1.0.1:
   resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b"
   integrity sha1-JtII6onje1y95gJQoV8DHBak1ms=
 
-callsites@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
-  integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
-
 camelcase@^5.0.0, camelcase@^5.3.1:
   version "5.3.1"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
   integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
 
[email protected], chalk@^4.0.0:
[email protected]:
   version "4.1.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
   integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
@@ -829,15 +786,6 @@ crc32-stream@^4.0.2:
     crc-32 "^1.2.0"
     readable-stream "^3.4.0"
 
-cross-spawn@^7.0.2:
-  version "7.0.6"
-  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
-  integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
-  dependencies:
-    path-key "^3.1.0"
-    shebang-command "^2.0.0"
-    which "^2.0.1"
-
 crypto-random-string@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
@@ -855,7 +803,7 @@ [email protected], debug@^2.2.0:
   dependencies:
     ms "2.0.0"
 
-debug@4, [email protected], debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.3:
+debug@4, [email protected], debug@^4.1.0, debug@^4.3.3:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -886,11 +834,6 @@ deep-extend@^0.6.0:
   resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
   integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
 
-deep-is@^0.1.3:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
-  integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
-
 defer-to-connect@^1.0.1:
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
@@ -942,13 +885,6 @@ [email protected]:
   dependencies:
     streamsearch "0.1.2"
 
-doctrine@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
-  integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
-  dependencies:
-    esutils "^2.0.2"
-
 dot-prop@^5.2.0:
   version "5.2.0"
   resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
@@ -1061,123 +997,16 @@ escape-string-regexp@^1.0.5:
   resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
   integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
 
-escape-string-regexp@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
-  integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
-
-eslint-plugin-align-assignments@^1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/eslint-plugin-align-assignments/-/eslint-plugin-align-assignments-1.1.2.tgz#83e1a8a826d4adf29e82b52d0bb39c88b301b576"
-  integrity sha512-I1ZJgk9EjHfGVU9M2Ex8UkVkkjLL5Y9BS6VNnQHq79eHj2H4/Cgxf36lQSUTLgm2ntB03A2NtF+zg9fyi5vChg==
-
-eslint-scope@^7.1.1:
-  version "7.1.1"
-  resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642"
-  integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==
-  dependencies:
-    esrecurse "^4.3.0"
-    estraverse "^5.2.0"
-
-eslint-visitor-keys@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
-  integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==
-
-eslint@^8.36.0:
-  version "8.36.0"
-  resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.36.0.tgz#1bd72202200a5492f91803b113fb8a83b11285cf"
-  integrity sha512-Y956lmS7vDqomxlaaQAHVmeb4tNMp2FWIvU/RnU5BD3IKMD/MJPr76xdyr68P8tV1iNMvN2mRK0yy3c+UjL+bw==
-  dependencies:
-    "@eslint-community/eslint-utils" "^4.2.0"
-    "@eslint-community/regexpp" "^4.4.0"
-    "@eslint/eslintrc" "^2.0.1"
-    "@eslint/js" "8.36.0"
-    "@humanwhocodes/config-array" "^0.11.8"
-    "@humanwhocodes/module-importer" "^1.0.1"
-    "@nodelib/fs.walk" "^1.2.8"
-    ajv "^6.10.0"
-    chalk "^4.0.0"
-    cross-spawn "^7.0.2"
-    debug "^4.3.2"
-    doctrine "^3.0.0"
-    escape-string-regexp "^4.0.0"
-    eslint-scope "^7.1.1"
-    eslint-visitor-keys "^3.3.0"
-    espree "^9.5.0"
-    esquery "^1.4.2"
-    esutils "^2.0.2"
-    fast-deep-equal "^3.1.3"
-    file-entry-cache "^6.0.1"
-    find-up "^5.0.0"
-    glob-parent "^6.0.2"
-    globals "^13.19.0"
-    grapheme-splitter "^1.0.4"
-    ignore "^5.2.0"
-    import-fresh "^3.0.0"
-    imurmurhash "^0.1.4"
-    is-glob "^4.0.0"
-    is-path-inside "^3.0.3"
-    js-sdsl "^4.1.4"
-    js-yaml "^4.1.0"
-    json-stable-stringify-without-jsonify "^1.0.1"
-    levn "^0.4.1"
-    lodash.merge "^4.6.2"
-    minimatch "^3.1.2"
-    natural-compare "^1.4.0"
-    optionator "^0.9.1"
-    strip-ansi "^6.0.1"
-    strip-json-comments "^3.1.0"
-    text-table "^0.2.0"
-
 esm@^3.2.25:
   version "3.2.25"
   resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10"
   integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==
 
-espree@^9.5.0:
-  version "9.5.0"
-  resolved "https://registry.yarnpkg.com/espree/-/espree-9.5.0.tgz#3646d4e3f58907464edba852fa047e6a27bdf113"
-  integrity sha512-JPbJGhKc47++oo4JkEoTe2wjy4fmMwvFpgJT9cQzmfXKp22Dr6Hf1tdCteLz1h0P3t+mGvWZ+4Uankvh8+c6zw==
-  dependencies:
-    acorn "^8.8.0"
-    acorn-jsx "^5.3.2"
-    eslint-visitor-keys "^3.3.0"
-
 esprima@^4.0.0:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
   integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
 
-esquery@^1.4.2:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b"
-  integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==
-  dependencies:
-    estraverse "^5.1.0"
-
-esrecurse@^4.3.0:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
-  integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
-  dependencies:
-    estraverse "^5.2.0"
-
-estraverse@^5.1.0:
-  version "5.2.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
-  integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
-
-estraverse@^5.2.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
-  integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
-
-esutils@^2.0.2:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
-  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
-
 etag@~1.8.1:
   version "1.8.1"
   resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
@@ -1237,28 +1066,11 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
   resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
   integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
 
-fast-json-stable-stringify@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
-  integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
-
-fast-levenshtein@^2.0.6:
-  version "2.0.6"
-  resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
-  integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
-
 fast-uri@^3.0.1:
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.2.tgz#d78b298cf70fd3b752fd951175a3da6a7b48f024"
   integrity sha512-GR6f0hD7XXyNJa25Tb9BuIdN0tdr+0BMi6/CJPH3wJO1JjNG3n/VsSw38AwRdKZABm8lGbPfakLRkYzx2V9row==
 
-fastq@^1.6.0:
-  version "1.15.0"
-  resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a"
-  integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==
-  dependencies:
-    reusify "^1.0.4"
-
 figures@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
@@ -1266,13 +1078,6 @@ figures@^2.0.0:
   dependencies:
     escape-string-regexp "^1.0.5"
 
-file-entry-cache@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
-  integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
-  dependencies:
-    flat-cache "^3.0.4"
-
 fill-range@^7.1.1:
   version "7.1.1"
   resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
@@ -1308,27 +1113,6 @@ find-up@^4.1.0:
     locate-path "^5.0.0"
     path-exists "^4.0.0"
 
-find-up@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
-  integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==
-  dependencies:
-    locate-path "^6.0.0"
-    path-exists "^4.0.0"
-
-flat-cache@^3.0.4:
-  version "3.0.4"
-  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
-  integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==
-  dependencies:
-    flatted "^3.1.0"
-    rimraf "^3.0.2"
-
-flatted@^3.1.0:
-  version "3.2.7"
-  resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
-  integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==
-
 [email protected]:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
@@ -1468,13 +1252,6 @@ [email protected]:
   resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.3.0.tgz#71e5593284807e03e2427449d4f6712a268666f4"
   integrity sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==
 
-glob-parent@^6.0.2:
-  version "6.0.2"
-  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
-  integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
-  dependencies:
-    is-glob "^4.0.3"
-
 glob-parent@~5.1.0:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
@@ -1513,13 +1290,6 @@ global-dirs@^2.0.1:
   dependencies:
     ini "^1.3.5"
 
-globals@^13.19.0:
-  version "13.20.0"
-  resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82"
-  integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==
-  dependencies:
-    type-fest "^0.20.2"
-
 gopd@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
@@ -1559,11 +1329,6 @@ graceful-fs@^4.2.6:
   resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
   integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
 
-grapheme-splitter@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e"
-  integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==
-
 gravatar@^1.8.0:
   version "1.8.1"
   resolved "https://registry.yarnpkg.com/gravatar/-/gravatar-1.8.1.tgz#743bbdf3185c3433172e00e0e6ff5f6b30c58997"
@@ -1696,27 +1461,6 @@ ignore-walk@^3.0.1:
   dependencies:
     minimatch "^3.0.4"
 
-ignore@^5.2.0:
-  version "5.2.4"
-  resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
-  integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
-
-import-fresh@^3.0.0:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66"
-  integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==
-  dependencies:
-    parent-module "^1.0.0"
-    resolve-from "^4.0.0"
-
-import-fresh@^3.2.1:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
-  integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
-  dependencies:
-    parent-module "^1.0.0"
-    resolve-from "^4.0.0"
-
 import-lazy@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
@@ -1823,20 +1567,13 @@ is-fullwidth-code-point@^3.0.0:
   resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
   integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
 
-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
+is-glob@^4.0.1, is-glob@~4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
   integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
   dependencies:
     is-extglob "^2.1.1"
 
-is-glob@^4.0.3:
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
-  integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
-  dependencies:
-    is-extglob "^2.1.1"
-
 is-installed-globally@^0.3.1:
   version "0.3.2"
   resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141"
@@ -1870,11 +1607,6 @@ is-path-inside@^3.0.1:
   resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
   integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
 
-is-path-inside@^3.0.3:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
-  integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
-
 is-property@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84"
@@ -1905,11 +1637,6 @@ isexe@^2.0.0:
   resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
   integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
 
-js-sdsl@^4.1.4:
-  version "4.3.0"
-  resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.3.0.tgz#aeefe32a451f7af88425b11fdb5f58c90ae1d711"
-  integrity sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==
-
 js-yaml@^3.13.1:
   version "3.14.0"
   resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
@@ -1935,21 +1662,11 @@ json-parse-better-errors@^1.0.1:
   resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
   integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
 
-json-schema-traverse@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
-  integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
-
 json-schema-traverse@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
   integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
 
-json-stable-stringify-without-jsonify@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
-  integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
-
 jsonwebtoken@^9.0.0:
   version "9.0.0"
   resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz#d0faf9ba1cc3a56255fe49c0961a67e520c1926d"
@@ -2018,14 +1735,6 @@ lazystream@^1.0.0:
   dependencies:
     readable-stream "^2.0.5"
 
-levn@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
-  integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
-  dependencies:
-    prelude-ls "^1.2.1"
-    type-check "~0.4.0"
-
 [email protected]:
   version "10.6.1"
   resolved "https://registry.yarnpkg.com/liquidjs/-/liquidjs-10.6.1.tgz#b401662cb8f0cca59b42f79fc08e411c86d92dab"
@@ -2058,13 +1767,6 @@ locate-path@^5.0.0:
   dependencies:
     p-locate "^4.1.0"
 
-locate-path@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
-  integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==
-  dependencies:
-    p-locate "^5.0.0"
-
 lodash.defaults@^4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
@@ -2085,11 +1787,6 @@ lodash.isplainobject@^4.0.6:
   resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
   integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=
 
-lodash.merge@^4.6.2:
-  version "4.6.2"
-  resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
-  integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
-
 lodash.union@^4.6.0:
   version "4.6.0"
   resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
@@ -2217,13 +1914,6 @@ minimatch@^3.0.4:
   dependencies:
     brace-expansion "^1.1.7"
 
-minimatch@^3.0.5, minimatch@^3.1.2:
-  version "3.1.2"
-  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
-  integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
-  dependencies:
-    brace-expansion "^1.1.7"
-
 minimist@^1.2.0, minimist@^1.2.5:
   version "1.2.8"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
@@ -2357,11 +2047,6 @@ named-placeholders@^1.1.3:
   dependencies:
     lru-cache "^7.14.1"
 
-natural-compare@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
-  integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
-
 needle@^2.5.0:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/needle/-/needle-2.5.0.tgz#e6fc4b3cc6c25caed7554bd613a5cf0bac8c31c0"
@@ -2573,18 +2258,6 @@ once@^1.3.0, once@^1.3.1, once@^1.4.0:
   dependencies:
     wrappy "1"
 
-optionator@^0.9.1:
-  version "0.9.1"
-  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
-  integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==
-  dependencies:
-    deep-is "^0.1.3"
-    fast-levenshtein "^2.0.6"
-    levn "^0.4.1"
-    prelude-ls "^1.2.1"
-    type-check "^0.4.0"
-    word-wrap "^1.2.3"
-
 os-homedir@^1.0.0:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
@@ -2622,13 +2295,6 @@ p-limit@^2.2.0:
   dependencies:
     p-try "^2.0.0"
 
-p-limit@^3.0.2:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
-  integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==
-  dependencies:
-    yocto-queue "^0.1.0"
-
 p-locate@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
@@ -2643,13 +2309,6 @@ p-locate@^4.1.0:
   dependencies:
     p-limit "^2.2.0"
 
-p-locate@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
-  integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==
-  dependencies:
-    p-limit "^3.0.2"
-
 p-map@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b"
@@ -2677,13 +2336,6 @@ package-json@^6.3.0:
     registry-url "^5.0.0"
     semver "^6.2.0"
 
-parent-module@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
-  integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
-  dependencies:
-    callsites "^3.0.0"
-
 parse-json@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
@@ -2712,11 +2364,6 @@ path-is-absolute@^1.0.0:
   resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
   integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
 
-path-key@^3.1.0:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
-  integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
-
 path-parse@^1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
@@ -2836,21 +2483,11 @@ postgres-interval@^1.1.0:
   dependencies:
     xtend "^4.0.0"
 
-prelude-ls@^1.2.1:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
-  integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
-
 prepend-http@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
   integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=
 
-prettier@^2.0.4:
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
-  integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==
-
 printj@~1.1.0:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.2.tgz#d90deb2975a8b9f600fb3a1c94e3f4c53c78a222"
@@ -2931,11 +2568,6 @@ [email protected]:
   resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
   integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
 
-queue-microtask@^1.2.2:
-  version "1.2.3"
-  resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
-  integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
-
 range-parser@~1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
@@ -3033,11 +2665,6 @@ require-main-filename@^2.0.0:
   resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
   integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
 
-resolve-from@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
-  integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
-
 resolve-from@^5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
@@ -3064,11 +2691,6 @@ retry@^0.12.0:
   resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b"
   integrity sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==
 
-reusify@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
-  integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
-
 rimraf@^2.6.1:
   version "2.7.1"
   resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
@@ -3083,13 +2705,6 @@ rimraf@^3.0.2:
   dependencies:
     glob "^7.1.3"
 
-run-parallel@^1.1.9:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
-  integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
-  dependencies:
-    queue-microtask "^1.2.2"
-
 [email protected], safe-buffer@~5.1.0, safe-buffer@~5.1.1:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
@@ -3209,18 +2824,6 @@ [email protected]:
   resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
   integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
 
-shebang-command@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
-  integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
-  dependencies:
-    shebang-regex "^3.0.0"
-
-shebang-regex@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
-  integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
-
 side-channel@^1.0.4, side-channel@^1.0.6:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2"
@@ -3413,11 +3016,6 @@ strip-bom@^3.0.0:
   resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
   integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
 
-strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
-  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
-
 strip-json-comments@~2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
@@ -3504,11 +3102,6 @@ term-size@^2.1.0:
   resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.0.tgz#1f16adedfe9bdc18800e1776821734086fcc6753"
   integrity sha512-a6sumDlzyHVJWb8+YofY4TW112G6p2FCPEAFk+59gIYHv3XHRhm9ltVQ9kli4hNWeQBwSpe8cRN25x0ROunMOw==
 
-text-table@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
-  integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
-
 [email protected]:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a"
@@ -3543,18 +3136,6 @@ tr46@~0.0.3:
   resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
   integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
 
-type-check@^0.4.0, type-check@~0.4.0:
-  version "0.4.0"
-  resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
-  integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
-  dependencies:
-    prelude-ls "^1.2.1"
-
-type-fest@^0.20.2:
-  version "0.20.2"
-  resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
-  integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
-
 type-fest@^0.8.1:
   version "0.8.1"
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
@@ -3686,7 +3267,7 @@ which-module@^2.0.0:
   resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
   integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
 
-which@^2.0.1, which@^2.0.2:
+which@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
   integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
@@ -3714,11 +3295,6 @@ widest-line@^3.1.0:
   dependencies:
     string-width "^4.0.0"
 
-word-wrap@^1.2.3:
-  version "1.2.4"
-  resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f"
-  integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==
-
 wrap-ansi@^6.2.0:
   version "6.2.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
@@ -3793,11 +3369,6 @@ yargs@^15.4.1:
     y18n "^4.0.0"
     yargs-parser "^18.1.2"
 
-yocto-queue@^0.1.0:
-  version "0.1.0"
-  resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
-  integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
-
 zip-stream@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.0.tgz#51dd326571544e36aa3f756430b313576dc8fc79"

+ 1 - 1
scripts/ci/frontend-build

@@ -17,7 +17,7 @@ if hash docker 2>/dev/null; then
 		-v "$(pwd)/frontend:/app/frontend" \
 		-v "$(pwd)/global:/app/global" \
 		-w /app/frontend "${DOCKER_IMAGE}" \
-		sh -c "yarn install && yarn build && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
+		sh -c "yarn install && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
 
 	echo -e "${BLUE}❯ ${GREEN}Building Frontend Complete${RESET}"
 else

+ 1 - 1
scripts/ci/test-and-build

@@ -13,7 +13,7 @@ docker run --rm \
 	-v "$(pwd)/global:/app/global" \
 	-w /app \
 	"${TESTING_IMAGE}" \
-	sh -c 'yarn install && yarn eslint . && rm -rf node_modules'
+	sh -c 'yarn install && yarn lint . && rm -rf node_modules'
 echo -e "${BLUE}❯ ${GREEN}Testing Complete${RESET}"
 
 # Build

Some files were not shown because too many files changed in this diff