
Postgres Support

- Combines #4086 and #4087 PRs
- Adds authentik to the CI stack
Jamie Curnow 10 months ago
parent
commit
ca3ee98c68

+ 38 - 0
Jenkinsfile

@@ -167,6 +167,44 @@ pipeline {
 				}
 			}
 		}
+		stage('Test Postgres') {
+			environment {
+				COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
+				COMPOSE_FILE         = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
+			}
+			when {
+				not {
+					equals expected: 'UNSTABLE', actual: currentBuild.result
+				}
+			}
+			steps {
+				sh 'rm -rf ./test/results/junit/*'
+				sh './scripts/ci/fulltest-cypress'
+			}
+			post {
+				always {
+					// Dump container logs for later analysis
+					sh 'mkdir -p debug/postgres'
+					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
+
+					junit 'test/results/junit/*'
+					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
+				}
+				unstable {
+					dir(path: 'testing/results') {
+						archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
+					}
+				}
+			}
+		}
 		stage('MultiArch Build') {
 			when {
 				not {

+ 10 - 4
backend/internal/access-list.js

@@ -81,7 +81,7 @@ const internalAccessList = {
 
 				return internalAccessList.build(row)
 					.then(() => {
-						if (row.proxy_host_count) {
+						if (parseInt(row.proxy_host_count, 10)) {
 							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
 						}
 					})
@@ -223,7 +223,7 @@ const internalAccessList = {
 			.then((row) => {
 				return internalAccessList.build(row)
 					.then(() => {
-						if (row.proxy_host_count) {
+						if (parseInt(row.proxy_host_count, 10)) {
 							return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
 						}
 					}).then(internalNginx.reload)
@@ -252,7 +252,10 @@ const internalAccessList = {
 				let query = accessListModel
 					.query()
 					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
+					.leftJoin('proxy_host', function() {
+						this.on('proxy_host.access_list_id', '=', 'access_list.id')
+							.andOn('proxy_host.is_deleted', '=', 0);
+					})
 					.where('access_list.is_deleted', 0)
 					.andWhere('access_list.id', data.id)
 					.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
@@ -373,7 +376,10 @@ const internalAccessList = {
 				let query = accessListModel
 					.query()
 					.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
-					.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
+					.leftJoin('proxy_host', function() {
+						this.on('proxy_host.access_list_id', '=', 'access_list.id')
+							.andOn('proxy_host.is_deleted', '=', 0);
+					})
 					.where('access_list.is_deleted', 0)
 					.groupBy('access_list.id')
 					.allowGraph('[owner,items,clients]')
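
The `parseInt()` changes above address a driver difference: Postgres `COUNT()` returns a bigint, which the `pg` driver delivers as a string, so a zero count arrives as the truthy string `'0'`. A minimal sketch of the pitfall (the row shape is illustrative):

```js
// node-postgres returns bigint (the type of COUNT()) as a string by default,
// so a zero count is the truthy string '0' rather than the number 0.
const row = { proxy_host_count: '0' }; // shape as returned on Postgres

if (row.proxy_host_count) {
	// would run even when no proxy hosts reference the access list
}
if (parseInt(row.proxy_host_count, 10)) {
	// runs only for a non-zero count, on every supported engine
}
```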

+ 5 - 4
backend/internal/audit-log.js

@@ -1,5 +1,6 @@
-const error         = require('../lib/error');
-const auditLogModel = require('../models/audit-log');
+const error            = require('../lib/error');
+const auditLogModel    = require('../models/audit-log');
+const {castJsonIfNeed} = require('../lib/helpers');
 
 const internalAuditLog = {
 
@@ -22,9 +23,9 @@ const internalAuditLog = {
 					.allowGraph('[user]');
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === 'string' && search_query.length > 0) {
 					query.where(function () {
-						this.where('meta', 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed('meta'), 'like', '%' + search_query + '%');
 					});
 				}
 

+ 4 - 3
backend/internal/dead-host.js

@@ -6,6 +6,7 @@ const internalHost        = require('./host');
 const internalNginx       = require('./nginx');
 const internalAuditLog    = require('./audit-log');
 const internalCertificate = require('./certificate');
+const {castJsonIfNeed}    = require('../lib/helpers');
 
 function omissions () {
 	return ['is_deleted'];
@@ -409,16 +410,16 @@ const internalDeadHost = {
 					.where('is_deleted', 0)
 					.groupBy('id')
 					.allowGraph('[owner,certificate]')
-					.orderBy('domain_names', 'ASC');
+					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
 
 				if (access_data.permission_visibility !== 'all') {
 					query.andWhere('owner_user_id', access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === 'string' && search_query.length > 0) {
 					query.where(function () {
-						this.where('domain_names', 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed('domain_names'), 'like', '%' + search_query + '%');
 					});
 				}
 

+ 7 - 6
backend/internal/host.js

@@ -2,6 +2,7 @@ const _                    = require('lodash');
 const proxyHostModel       = require('../models/proxy_host');
 const redirectionHostModel = require('../models/redirection_host');
 const deadHostModel        = require('../models/dead_host');
+const {castJsonIfNeed}     = require('../lib/helpers');
 
 const internalHost = {
 
@@ -17,7 +18,7 @@ const internalHost = {
 	cleanSslHstsData: function (data, existing_data) {
 		existing_data = existing_data === undefined ? {} : existing_data;
 
-		let combined_data = _.assign({}, existing_data, data);
+		const combined_data = _.assign({}, existing_data, data);
 
 		if (!combined_data.certificate_id) {
 			combined_data.ssl_forced    = false;
@@ -73,7 +74,7 @@ const internalHost = {
 	 * @returns {Promise}
 	 */
 	getHostsWithDomains: function (domain_names) {
-		let promises = [
+		const promises = [
 			proxyHostModel
 				.query()
 				.where('is_deleted', 0),
@@ -125,19 +126,19 @@ const internalHost = {
 	 * @returns {Promise}
 	 */
 	isHostnameTaken: function (hostname, ignore_type, ignore_id) {
-		let promises = [
+		const promises = [
 			proxyHostModel
 				.query()
 				.where('is_deleted', 0)
-				.andWhere('domain_names', 'like', '%' + hostname + '%'),
+				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
 			redirectionHostModel
 				.query()
 				.where('is_deleted', 0)
-				.andWhere('domain_names', 'like', '%' + hostname + '%'),
+				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%'),
 			deadHostModel
 				.query()
 				.where('is_deleted', 0)
-				.andWhere('domain_names', 'like', '%' + hostname + '%')
+				.andWhere(castJsonIfNeed('domain_names'), 'like', '%' + hostname + '%')
 		];
 
 		return Promise.all(promises)

+ 4 - 3
backend/internal/proxy-host.js

@@ -6,6 +6,7 @@ const internalHost        = require('./host');
 const internalNginx       = require('./nginx');
 const internalAuditLog    = require('./audit-log');
 const internalCertificate = require('./certificate');
+const {castJsonIfNeed}    = require('../lib/helpers');
 
 function omissions () {
 	return ['is_deleted', 'owner.is_deleted'];
@@ -416,16 +417,16 @@ const internalProxyHost = {
 					.where('is_deleted', 0)
 					.groupBy('id')
 					.allowGraph('[owner,access_list,certificate]')
-					.orderBy('domain_names', 'ASC');
+					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
 
 				if (access_data.permission_visibility !== 'all') {
 					query.andWhere('owner_user_id', access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === 'string' && search_query.length > 0) {
 					query.where(function () {
-						this.where('domain_names', 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
 					});
 				}
 

+ 4 - 3
backend/internal/redirection-host.js

@@ -6,6 +6,7 @@ const internalHost         = require('./host');
 const internalNginx        = require('./nginx');
 const internalAuditLog     = require('./audit-log');
 const internalCertificate  = require('./certificate');
+const {castJsonIfNeed}     = require('../lib/helpers');
 
 function omissions () {
 	return ['is_deleted'];
@@ -409,16 +410,16 @@ const internalRedirectionHost = {
 					.where('is_deleted', 0)
 					.groupBy('id')
 					.allowGraph('[owner,certificate]')
-					.orderBy('domain_names', 'ASC');
+					.orderBy(castJsonIfNeed('domain_names'), 'ASC');
 
 				if (access_data.permission_visibility !== 'all') {
 					query.andWhere('owner_user_id', access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === 'string' && search_query.length > 0) {
 					query.where(function () {
-						this.where('domain_names', 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
 					});
 				}
 

+ 7 - 6
backend/internal/stream.js

@@ -4,6 +4,7 @@ const utils            = require('../lib/utils');
 const streamModel      = require('../models/stream');
 const internalNginx    = require('./nginx');
 const internalAuditLog = require('./audit-log');
+const {castJsonIfNeed} = require('../lib/helpers');
 
 function omissions () {
 	return ['is_deleted'];
@@ -293,21 +294,21 @@ const internalStream = {
 	getAll: (access, expand, search_query) => {
 		return access.can('streams:list')
 			.then((access_data) => {
-				let query = streamModel
+				const query = streamModel
 					.query()
 					.where('is_deleted', 0)
 					.groupBy('id')
 					.allowGraph('[owner]')
-					.orderBy('incoming_port', 'ASC');
+					.orderByRaw('CAST(incoming_port AS INTEGER) ASC');
 
 				if (access_data.permission_visibility !== 'all') {
 					query.andWhere('owner_user_id', access.token.getUserId(1));
 				}
 
 				// Query is used for searching
-				if (typeof search_query === 'string') {
+				if (typeof search_query === 'string' && search_query.length > 0) {
 					query.where(function () {
-						this.where('incoming_port', 'like', '%' + search_query + '%');
+						this.where(castJsonIfNeed('incoming_port'), 'like', `%${search_query}%`);
 					});
 				}
 
@@ -327,9 +328,9 @@ const internalStream = {
 	 * @returns {Promise}
 	 */
 	getCount: (user_id, visibility) => {
-		let query = streamModel
+		const query = streamModel
 			.query()
-			.count('id as count')
+			.count('id AS count')
 			.where('is_deleted', 0);
 
 		if (visibility !== 'all') {
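
The switch to `orderByRaw('CAST(incoming_port AS INTEGER) ASC')` presumably forces numeric ordering where the column would otherwise compare as text, in which case `'10'` would sort before `'9'`:

```js
// Numeric ordering independent of how the driver types the column;
// a text comparison would sort '10' before '9'.
query.orderByRaw('CAST(incoming_port AS INTEGER) ASC');
```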

+ 48 - 5
backend/lib/config.js

@@ -2,7 +2,10 @@ const fs      = require('fs');
 const NodeRSA = require('node-rsa');
 const logger  = require('../logger').global;
 
-const keysFile = '/data/keys.json';
+const keysFile         = '/data/keys.json';
+const mysqlEngine      = 'mysql2';
+const postgresEngine   = 'pg';
+const sqliteClientName = 'sqlite3';
 
 let instance = null;
 
@@ -14,7 +17,7 @@ const configure = () => {
 		let configData;
 		try {
 			configData = require(filename);
-		} catch (err) {
+		} catch (_) {
 			// do nothing
 		}
 
@@ -34,7 +37,7 @@ const configure = () => {
 		logger.info('Using MySQL configuration');
 		instance = {
 			database: {
-				engine:   'mysql2',
+				engine:   mysqlEngine,
 				host:     envMysqlHost,
 				port:     process.env.DB_MYSQL_PORT || 3306,
 				user:     envMysqlUser,
@@ -46,13 +49,33 @@ const configure = () => {
 		return;
 	}
 
+	const envPostgresHost = process.env.DB_POSTGRES_HOST || null;
+	const envPostgresUser = process.env.DB_POSTGRES_USER || null;
+	const envPostgresName = process.env.DB_POSTGRES_NAME || null;
+	if (envPostgresHost && envPostgresUser && envPostgresName) {
+		// we have enough postgres creds to use postgres
+		logger.info('Using Postgres configuration');
+		instance = {
+			database: {
+				engine:   postgresEngine,
+				host:     envPostgresHost,
+				port:     process.env.DB_POSTGRES_PORT || 5432,
+				user:     envPostgresUser,
+				password: process.env.DB_POSTGRES_PASSWORD,
+				name:     envPostgresName,
+			},
+			keys: getKeys(),
+		};
+		return;
+	}
+
 	const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
 	logger.info(`Using Sqlite: ${envSqliteFile}`);
 	instance = {
 		database: {
 			engine: 'knex-native',
 			knex:   {
-				client:     'sqlite3',
+				client:     sqliteClientName,
 				connection: {
 					filename: envSqliteFile
 				},
@@ -143,7 +166,27 @@ module.exports = {
 	 */
 	isSqlite: function () {
 		instance === null && configure();
-		return instance.database.knex && instance.database.knex.client === 'sqlite3';
+		return instance.database.knex && instance.database.knex.client === sqliteClientName;
+	},
+
+	/**
+	 * Is this a mysql configuration?
+	 *
+	 * @returns {boolean}
+	 */
+	isMysql: function () {
+		instance === null && configure();
+		return instance.database.engine === mysqlEngine;
+	},
+
+	/**
+	 * Is this a postgres configuration?
+	 *
+	 * @returns {boolean}
+	 */
+	isPostgres: function () {
+		instance === null && configure();
+		return instance.database.engine === postgresEngine;
 	},
 
 	/**
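
With the Postgres block added, credential resolution order is: MySQL env vars first, then Postgres, then the SQLite fallback. A hypothetical condensation of that precedence (the function name is ours; the env var names and engine strings come from the diff):

```js
// First engine with a complete credential set wins; mirrors backend/lib/config.js.
function pickEngine(env) {
	if (env.DB_MYSQL_HOST && env.DB_MYSQL_USER && env.DB_MYSQL_NAME) return 'mysql2';
	if (env.DB_POSTGRES_HOST && env.DB_POSTGRES_USER && env.DB_POSTGRES_NAME) return 'pg';
	return 'knex-native'; // sqlite3 client, DB_SQLITE_FILE or /data/database.sqlite
}
```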

+ 13 - 1
backend/lib/helpers.js

@@ -1,4 +1,6 @@
-const moment = require('moment');
+const moment       = require('moment');
+const {isPostgres} = require('./config');
+const {ref}        = require('objection');
 
 module.exports = {
 
@@ -45,6 +47,16 @@ module.exports = {
 			}
 		});
 		return obj;
+	},
+
+	/**
+	 * Casts a JSON column to text when using Postgres (for LIKE and ORDER BY)
+	 *
+	 * @param {string} colName
+	 * @returns {string|Objection.ReferenceBuilder}
+	 */
+	castJsonIfNeed: function (colName) {
+		return isPostgres() ? ref(colName).castText() : colName;
 	}
 
 };
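
`castJsonIfNeed` exists because `domain_names` appears to be stored as a JSON type on Postgres, which accepts neither `LIKE` nor `ORDER BY` directly; Objection's `ref(...).castText()` wraps the column in a `CAST(... AS text)`, while MySQL and SQLite receive the bare column name. A usage sketch mirroring the call sites below:

```js
// On Postgres, castJsonIfNeed('domain_names') yields a ReferenceBuilder that
// emits CAST("domain_names" AS text); elsewhere it returns the plain string.
const {castJsonIfNeed} = require('../lib/helpers');

query
	.orderBy(castJsonIfNeed('domain_names'), 'ASC')
	.where(castJsonIfNeed('domain_names'), 'like', `%${search_query}%`);
```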

+ 3 - 0
backend/models/redirection_host.js

@@ -17,6 +17,9 @@ const boolFields = [
 	'preserve_path',
 	'ssl_forced',
 	'block_exploits',
+	'hsts_enabled',
+	'hsts_subdomains',
+	'http2_support',
 ];
 
 class RedirectionHost extends Model {
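
The three HSTS/HTTP2 flags had been left out of `boolFields`, so their integer values were not converted. A hedged illustration of the kind of conversion such a list typically drives (the helper below is hypothetical; the diff only shows the field list):

```js
// Hypothetical sketch: expose integer 0/1 database columns as booleans.
const boolFields = ['hsts_enabled', 'hsts_subdomains', 'http2_support'];

const parseDbRow = (row) => {
	for (const field of boolFields) {
		if (field in row) row[field] = Boolean(row[field]);
	}
	return row;
};
```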

+ 1 - 0
backend/package.json

@@ -23,6 +23,7 @@
 		"node-rsa": "^1.0.8",
 		"objection": "3.0.1",
 		"path": "^0.12.7",
+		"pg": "^8.13.1",
 		"signale": "1.4.0",
 		"sqlite3": "5.1.6",
 		"temp-write": "^4.0.0"

+ 8 - 8
backend/setup.js

@@ -15,18 +15,18 @@ const certbot             = require('./lib/certbot');
 const setupDefaultUser = () => {
 	return userModel
 		.query()
-		.select(userModel.raw('COUNT(`id`) as `count`'))
+		.select('id')
 		.where('is_deleted', 0)
 		.first()
 		.then((row) => {
-			if (!row.count) {
+			if (!row || !row.id) {
 				// Create a new user and set password
-				let email    = process.env.INITIAL_ADMIN_EMAIL || '[email protected]';
-				let password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
-				
+				const email    = process.env.INITIAL_ADMIN_EMAIL || '[email protected]';
+				const password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';
+
 				logger.info('Creating a new user: ' + email + ' with password: ' + password);
 
-				let data = {
+				const data = {
 					is_deleted: 0,
 					email:      email,
 					name:       'Administrator',
@@ -77,11 +77,11 @@ const setupDefaultUser = () => {
 const setupDefaultSettings = () => {
 	return settingModel
 		.query()
-		.select(settingModel.raw('COUNT(`id`) as `count`'))
+		.select('id')
 		.where({id: 'default-site'})
 		.first()
 		.then((row) => {
-			if (!row.count) {
+			if (!row || !row.id) {
 				settingModel
 					.query()
 					.insert({
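
The removed raw selects quoted `id` with backticks, which MySQL and SQLite accept but Postgres rejects (it uses double quotes for identifiers). Selecting the first matching row is engine-neutral:

```js
// Engine-neutral existence check, as used above: fetch the first row's id
// instead of a raw COUNT with backtick-quoted identifiers.
userModel
	.query()
	.select('id')
	.where('is_deleted', 0)
	.first()
	.then((row) => {
		const exists = Boolean(row && row.id);
		// when !exists, create the default admin user
	});
```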

+ 88 - 0
backend/yarn.lock

@@ -2735,11 +2735,67 @@ path@^0.12.7:
     process "^0.11.1"
     util "^0.10.3"
 
+pg-cloudflare@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz#e6d5833015b170e23ae819e8c5d7eaedb472ca98"
+  integrity sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==
+
 [email protected]:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
   integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==
 
+pg-connection-string@^2.7.0:
+  version "2.7.0"
+  resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.7.0.tgz#f1d3489e427c62ece022dba98d5262efcb168b37"
+  integrity sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==
+
[email protected]:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
+  integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==
+
+pg-pool@^3.7.0:
+  version "3.7.0"
+  resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.7.0.tgz#d4d3c7ad640f8c6a2245adc369bafde4ebb8cbec"
+  integrity sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==
+
+pg-protocol@^1.7.0:
+  version "1.7.0"
+  resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.7.0.tgz#ec037c87c20515372692edac8b63cf4405448a93"
+  integrity sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ==
+
+pg-types@^2.1.0:
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
+  integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==
+  dependencies:
+    pg-int8 "1.0.1"
+    postgres-array "~2.0.0"
+    postgres-bytea "~1.0.0"
+    postgres-date "~1.0.4"
+    postgres-interval "^1.1.0"
+
+pg@^8.13.1:
+  version "8.13.1"
+  resolved "https://registry.yarnpkg.com/pg/-/pg-8.13.1.tgz#6498d8b0a87ff76c2df7a32160309d3168c0c080"
+  integrity sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==
+  dependencies:
+    pg-connection-string "^2.7.0"
+    pg-pool "^3.7.0"
+    pg-protocol "^1.7.0"
+    pg-types "^2.1.0"
+    pgpass "1.x"
+  optionalDependencies:
+    pg-cloudflare "^1.1.1"
+
[email protected]:
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"
+  integrity sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==
+  dependencies:
+    split2 "^4.1.0"
+
 picomatch@^2.0.4, picomatch@^2.2.1:
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
@@ -2758,6 +2814,28 @@ pkg-conf@^2.1.0:
     find-up "^2.0.0"
     load-json-file "^4.0.0"
 
+postgres-array@~2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e"
+  integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==
+
+postgres-bytea@~1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35"
+  integrity sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==
+
+postgres-date@~1.0.4:
+  version "1.0.7"
+  resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8"
+  integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==
+
+postgres-interval@^1.1.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695"
+  integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==
+  dependencies:
+    xtend "^4.0.0"
+
 prelude-ls@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
@@ -3194,6 +3272,11 @@ socks@^2.6.2:
     ip "^2.0.0"
     smart-buffer "^4.2.0"
 
+split2@^4.1.0:
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4"
+  integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==
+
 sprintf-js@~1.0.2:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
@@ -3665,6 +3748,11 @@ xdg-basedir@^4.0.0:
   resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
   integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
 
+xtend@^4.0.0:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
+  integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
+
 y18n@^4.0.0:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"

+ 8 - 0
docker/ci.env

@@ -0,0 +1,8 @@
+AUTHENTIK_SECRET_KEY=gl8woZe8L6IIX8SC0c5Ocsj0xPkX5uJo5DVZCFl+L/QGbzuplfutYuua2ODNLEiDD3aFd9H2ylJmrke0
+AUTHENTIK_REDIS__HOST=authentik-redis
+AUTHENTIK_POSTGRESQL__HOST=db-postgres
+AUTHENTIK_POSTGRESQL__USER=authentik
+AUTHENTIK_POSTGRESQL__NAME=authentik
+AUTHENTIK_POSTGRESQL__PASSWORD=07EKS5NLI6Tpv68tbdvrxfvj
+AUTHENTIK_BOOTSTRAP_PASSWORD=admin
[email protected]

BIN
docker/ci/postgres/authentik.sql.gz


+ 2 - 1
docker/dev/Dockerfile

@@ -29,7 +29,8 @@ COPY scripts/install-s6 /tmp/install-s6
 RUN rm -f /etc/nginx/conf.d/production.conf \
 	&& chmod 644 /etc/logrotate.d/nginx-proxy-manager \
 	&& /tmp/install-s6 "${TARGETPLATFORM}" \
-	&& rm -f /tmp/install-s6
+	&& rm -f /tmp/install-s6 \
+	&& chmod 644 -R /root/.cache
 
 # Certs for testing purposes
 COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem

+ 78 - 0
docker/docker-compose.ci.postgres.yml

@@ -0,0 +1,78 @@
+# WARNING: This is a CI docker-compose file used for building and testing the entire app. It should not be used in production.
+services:
+
+  cypress:
+    environment:
+      CYPRESS_stack: 'postgres'
+
+  fullstack:
+    environment:
+      DB_POSTGRES_HOST: 'db-postgres'
+      DB_POSTGRES_PORT: '5432'
+      DB_POSTGRES_USER: 'npm'
+      DB_POSTGRES_PASSWORD: 'npmpass'
+      DB_POSTGRES_NAME: 'npm'
+    depends_on:
+      - db-postgres
+      - authentik
+      - authentik-worker
+      - authentik-ldap
+
+  db-postgres:
+    image: postgres:latest
+    environment:
+      POSTGRES_USER: 'npm'
+      POSTGRES_PASSWORD: 'npmpass'
+      POSTGRES_DB: 'npm'
+    volumes:
+      - psql_vol:/var/lib/postgresql/data
+      - ./ci/postgres:/docker-entrypoint-initdb.d
+    networks:
+      - fulltest
+
+  authentik-redis:
+    image: 'redis:alpine'
+    command: --save 60 1 --loglevel warning
+    restart: unless-stopped
+    healthcheck:
+      test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
+      start_period: 20s
+      interval: 30s
+      retries: 5
+      timeout: 3s
+    volumes:
+      - redis_vol:/data
+
+  authentik:
+    image: ghcr.io/goauthentik/server:2024.10.1
+    restart: unless-stopped
+    command: server
+    env_file:
+      - ci.env
+    depends_on:
+      - authentik-redis
+      - db-postgres
+
+  authentik-worker:
+    image: ghcr.io/goauthentik/server:2024.10.1
+    restart: unless-stopped
+    command: worker
+    env_file:
+      - ci.env
+    depends_on:
+      - authentik-redis
+      - db-postgres
+
+  authentik-ldap:
+    image: ghcr.io/goauthentik/ldap:2024.10.1
+    environment:
+      AUTHENTIK_HOST: 'http://authentik:9000'
+      AUTHENTIK_INSECURE: 'true'
+      AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
+    restart: unless-stopped
+    depends_on:
+      - authentik
+
+volumes:
+  psql_vol:
+  redis_vol:

+ 103 - 16
docker/docker-compose.dev.yml

@@ -2,8 +2,8 @@
 services:
 
   fullstack:
-    image: nginxproxymanager:dev
-    container_name: npm_core
+    image: npm2dev:core
+    container_name: npm2dev.core
     build:
       context: ./
       dockerfile: ./dev/Dockerfile
@@ -26,11 +26,17 @@ services:
       DEVELOPMENT: 'true'
       LE_STAGING: 'true'
       # db:
-      DB_MYSQL_HOST: 'db'
-      DB_MYSQL_PORT: '3306'
-      DB_MYSQL_USER: 'npm'
-      DB_MYSQL_PASSWORD: 'npm'
-      DB_MYSQL_NAME: 'npm'
+      # DB_MYSQL_HOST: 'db'
+      # DB_MYSQL_PORT: '3306'
+      # DB_MYSQL_USER: 'npm'
+      # DB_MYSQL_PASSWORD: 'npm'
+      # DB_MYSQL_NAME: 'npm'
+      # db-postgres:
+      DB_POSTGRES_HOST: 'db-postgres'
+      DB_POSTGRES_PORT: '5432'
+      DB_POSTGRES_USER: 'npm'
+      DB_POSTGRES_PASSWORD: 'npmpass'
+      DB_POSTGRES_NAME: 'npm'
       # DB_SQLITE_FILE: "/data/database.sqlite"
       # DISABLE_IPV6: "true"
       # Required for DNS Certificate provisioning testing:
@@ -49,11 +55,15 @@ services:
       timeout: 3s
     depends_on:
       - db
+      - db-postgres
+      - authentik
+      - authentik-worker
+      - authentik-ldap
     working_dir: /app
 
   db:
     image: jc21/mariadb-aria
-    container_name: npm_db
+    container_name: npm2dev.db
     ports:
       - 33306:3306
     networks:
@@ -66,8 +76,22 @@ services:
     volumes:
       - db_data:/var/lib/mysql
 
+  db-postgres:
+    image: postgres:latest
+    container_name: npm2dev.db-postgres
+    networks:
+      - nginx_proxy_manager
+    environment:
+      POSTGRES_USER: 'npm'
+      POSTGRES_PASSWORD: 'npmpass'
+      POSTGRES_DB: 'npm'
+    volumes:
+      - psql_data:/var/lib/postgresql/data
+      - ./ci/postgres:/docker-entrypoint-initdb.d
+
   stepca:
     image: jc21/testca
+    container_name: npm2dev.stepca
     volumes:
       - './dev/resolv.conf:/etc/resolv.conf:ro'
       - '/etc/localtime:/etc/localtime:ro'
@@ -78,6 +102,7 @@ services:
 
   dnsrouter:
     image: jc21/dnsrouter
+    container_name: npm2dev.dnsrouter
     volumes:
       - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
     networks:
@@ -85,7 +110,7 @@ services:
 
   swagger:
     image: swaggerapi/swagger-ui:latest
-    container_name: npm_swagger
+    container_name: npm2dev.swagger
     ports:
       - 3082:80
     environment:
@@ -96,7 +121,7 @@ services:
 
   squid:
     image: ubuntu/squid
-    container_name: npm_squid
+    container_name: npm2dev.squid
     volumes:
       - './dev/squid.conf:/etc/squid/squid.conf:ro'
       - './dev/resolv.conf:/etc/resolv.conf:ro'
@@ -108,6 +133,7 @@ services:
 
   pdns:
     image: pschiffe/pdns-mysql
+    container_name: npm2dev.pdns
     volumes:
       - '/etc/localtime:/etc/localtime:ro'
     environment:
@@ -136,6 +162,7 @@ services:
 
   pdns-db:
     image: mariadb
+    container_name: npm2dev.pdns-db
     environment:
       MYSQL_ROOT_PASSWORD: 'pdns'
       MYSQL_DATABASE: 'pdns'
@@ -149,7 +176,8 @@ services:
       - nginx_proxy_manager
 
   cypress:
-    image: "npm_dev_cypress"
+    image: npm2dev:cypress
+    container_name: npm2dev.cypress
     build:
       context: ../
       dockerfile: test/cypress/Dockerfile
@@ -164,16 +192,75 @@ services:
     networks:
       - nginx_proxy_manager
 
+  authentik-redis:
+    image: 'redis:alpine'
+    container_name: npm2dev.authentik-redis
+    command: --save 60 1 --loglevel warning
+    networks:
+      - nginx_proxy_manager
+    restart: unless-stopped
+    healthcheck:
+      test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
+      start_period: 20s
+      interval: 30s
+      retries: 5
+      timeout: 3s
+    volumes:
+      - redis_data:/data
+
+  authentik:
+    image: ghcr.io/goauthentik/server:2024.10.1
+    container_name: npm2dev.authentik
+    restart: unless-stopped
+    command: server
+    networks:
+      - nginx_proxy_manager
+    env_file:
+      - ci.env
+    depends_on:
+      - authentik-redis
+      - db-postgres
+
+  authentik-worker:
+    image: ghcr.io/goauthentik/server:2024.10.1
+    container_name: npm2dev.authentik-worker
+    restart: unless-stopped
+    command: worker
+    networks:
+      - nginx_proxy_manager
+    env_file:
+      - ci.env
+    depends_on:
+      - authentik-redis
+      - db-postgres
+
+  authentik-ldap:
+    image: ghcr.io/goauthentik/ldap:2024.10.1
+    container_name: npm2dev.authentik-ldap
+    networks:
+      - nginx_proxy_manager
+    environment:
+      AUTHENTIK_HOST: 'http://authentik:9000'
+      AUTHENTIK_INSECURE: 'true'
+      AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
+    restart: unless-stopped
+    depends_on:
+      - authentik
+
 volumes:
   npm_data:
-    name: npm_core_data
+    name: npm2dev_core_data
   le_data:
-    name: npm_le_data
+    name: npm2dev_le_data
   db_data:
-    name: npm_db_data
+    name: npm2dev_db_data
   pdns_mysql:
-    name: npm_pdns_mysql
+    name: npm2dev_pdns_mysql
+  psql_data:
+    name: npm2dev_psql_data
+  redis_data:
+    name: npm2dev_redis_data
 
 networks:
   nginx_proxy_manager:
-    name: npm_network
+    name: npm2dev_network

+ 47 - 0
docs/src/setup/index.md

@@ -99,6 +99,53 @@ Please note, that `DB_MYSQL_*` environment variables will take precedent over `D
 
 :::
 
+## Using Postgres database
+
+Similar to the MySQL server setup:
+
+```yml
+services:
+  app:
+    image: 'jc21/nginx-proxy-manager:latest'
+    restart: unless-stopped
+    ports:
+      # These ports are in format <host-port>:<container-port>
+      - '80:80' # Public HTTP Port
+      - '443:443' # Public HTTPS Port
+      - '81:81' # Admin Web Port
+      # Add any other Stream port you want to expose
+      # - '21:21' # FTP
+    environment:
+      # Postgres parameters:
+      DB_POSTGRES_HOST: 'db'
+      DB_POSTGRES_PORT: '5432'
+      DB_POSTGRES_USER: 'npm'
+      DB_POSTGRES_PASSWORD: 'npmpass'
+      DB_POSTGRES_NAME: 'npm'
+      # Uncomment this if IPv6 is not enabled on your host
+      # DISABLE_IPV6: 'true'
+    volumes:
+      - ./data:/data
+      - ./letsencrypt:/etc/letsencrypt
+    depends_on:
+      - db
+
+  db:
+    image: postgres:latest
+    environment:
+      POSTGRES_USER: 'npm'
+      POSTGRES_PASSWORD: 'npmpass'
+      POSTGRES_DB: 'npm'
+    volumes:
+      - ./postgres:/var/lib/postgresql/data
+```
+
+::: warning
+
+A custom Postgres schema is not supported; the `public` schema will be used.
+
+:::
+
 ## Running on Raspberry PI / ARM devices
 
 The docker images support the following architectures:

+ 1 - 1
scripts/.common.sh

@@ -11,7 +11,7 @@ YELLOW='\E[1;33m'
 export BLUE CYAN GREEN RED RESET YELLOW
 
 # Docker Compose
-COMPOSE_PROJECT_NAME="npmdev"
+COMPOSE_PROJECT_NAME="npm2dev"
 COMPOSE_FILE="docker/docker-compose.dev.yml"
 
 export COMPOSE_FILE COMPOSE_PROJECT_NAME

+ 2 - 0
scripts/ci/fulltest-cypress

@@ -67,6 +67,8 @@ printf "nameserver %s\noptions ndots:0" "${DNSROUTER_IP}" > "${LOCAL_RESOLVE}"
 # bring up all remaining containers, except cypress!
 docker-compose up -d --remove-orphans stepca squid
 docker-compose pull db-mysql || true # ok to fail
+docker-compose pull db-postgres || true # ok to fail
+docker-compose pull authentik authentik-redis authentik-ldap || true # ok to fail
 docker-compose up -d --remove-orphans --pull=never fullstack
 
 # wait for main container to be healthy

+ 5 - 6
scripts/start-dev

@@ -36,12 +36,11 @@ if hash docker-compose 2>/dev/null; then
 
 	# bring up all remaining containers, except cypress!
 	docker-compose up -d --remove-orphans stepca squid
-	docker-compose pull db
-	docker-compose up -d --remove-orphans --pull=never fullstack
+	docker-compose pull db db-postgres authentik-redis authentik authentik-worker authentik-ldap
+	docker-compose build --pull --parallel fullstack
+	docker-compose up -d --remove-orphans fullstack
 	docker-compose up -d --remove-orphans swagger
 
-	# docker-compose up -d --remove-orphans --force-recreate --build
-
 	# wait for main container to be healthy
 	bash "$DIR/wait-healthy" "$(docker-compose ps --all -q fullstack)" 120
 
@@ -53,10 +52,10 @@ if hash docker-compose 2>/dev/null; then
 
 	if [ "$1" == "-f" ]; then
 		echo -e "${BLUE}❯ ${YELLOW}Following Backend Container:${RESET}"
-		docker logs -f npm_core
+		docker logs -f npm2dev.core
 	else
 		echo -e "${YELLOW}Hint:${RESET} You can follow the output of some of the containers with:"
-		echo "  docker logs -f npm_core"
+		echo "  docker logs -f npm2dev.core"
 	fi
 else
 	echo -e "${RED}❯ docker-compose command is not available${RESET}"

+ 64 - 0
test/cypress/e2e/api/Ldap.cy.js

@@ -0,0 +1,64 @@
+/// <reference types="cypress" />
+
+describe('LDAP with Authentik', () => {
+	let token;
+	if (Cypress.env('skipStackCheck') === 'true' || Cypress.env('stack') === 'postgres') {
+
+		before(() => {
+			cy.getToken().then((tok) => {
+				token = tok;
+
+				// cy.task('backendApiPut', {
+				// 	token: token,
+				// 	path:  '/api/settings/ldap-auth',
+				// 	data:  {
+				// 		value: {
+				// 			host: 'authentik-ldap:3389',
+				// 			base_dn: 'ou=users,DC=ldap,DC=goauthentik,DC=io',
+				// 			user_dn: 'cn={{USERNAME}},ou=users,DC=ldap,DC=goauthentik,DC=io',
+				// 			email_property: 'mail',
+				// 			name_property: 'sn',
+				// 			self_filter: '(&(cn={{USERNAME}})(ak-active=TRUE))',
+				// 			auto_create_user: true
+				// 		}
+				// 	}
+				// }).then((data) => {
+				// 	cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
+				// 	expect(data.result).to.have.property('id');
+				// 	expect(data.result.id).to.be.greaterThan(0);
+				// });
+
+				// cy.task('backendApiPut', {
+				// 	token: token,
+				// 	path:  '/api/settings/auth-methods',
+				// 	data:  {
+				// 		value: [
+				// 			'local',
+				// 			'ldap'
+				// 		]
+				// 	}
+				// }).then((data) => {
+				// 	cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
+				// 	expect(data.result).to.have.property('id');
+				// 	expect(data.result.id).to.be.greaterThan(0);
+				// });
+			});
+		});
+
+		it.skip('Should log in with LDAP', function() {
+			// cy.task('backendApiPost', {
+			// 	token: token,
+			// 	path:  '/api/auth',
+			// 	data:  {
+			// 		// Authentik LDAP creds:
+			// 		type: 'ldap',
+			// 		identity: 'cypress',
+			// 		secret: 'fqXBfUYqHvYqiwBHWW7f'
+			// 	}
+			// }).then((data) => {
+			// 	cy.validateSwaggerSchema('post', 200, '/auth', data);
+			// 	expect(data.result).to.have.property('token');
+			// });
+		});
+	}
+});

+ 97 - 0
test/cypress/e2e/api/OAuth.cy.js

@@ -0,0 +1,97 @@
+/// <reference types="cypress" />
+
+describe('OAuth with Authentik', () => {
+	let token;
+	if (Cypress.env('skipStackCheck') === 'true' || Cypress.env('stack') === 'postgres') {
+
+		before(() => {
+			cy.getToken().then((tok) => {
+				token = tok;
+
+				// cy.task('backendApiPut', {
+				// 	token: token,
+				// 	path:  '/api/settings/oauth-auth',
+				// 	data:  {
+				// 		value: {
+				// 			client_id: '7iO2AvuUp9JxiSVkCcjiIbQn4mHmUMBj7yU8EjqU',
+				// 			client_secret: 'VUMZzaGTrmXJ8PLksyqzyZ6lrtz04VvejFhPMBP9hGZNCMrn2LLBanySs4ta7XGrDr05xexPyZT1XThaf4ubg00WqvHRVvlu4Naa1aMootNmSRx3VAk6RSslUJmGyHzq',
+				// 			authorization_url: 'http://authentik:9000/application/o/authorize/',
+				// 			resource_url: 'http://authentik:9000/application/o/userinfo/',
+				// 			token_url: 'http://authentik:9000/application/o/token/',
+				// 			logout_url: 'http://authentik:9000/application/o/npm/end-session/',
+				// 			identifier: 'preferred_username',
+				// 			scopes: [],
+				// 			auto_create_user: true
+				// 		}
+				// 	}
+				// }).then((data) => {
+				// 	cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
+				// 	expect(data.result).to.have.property('id');
+				// 	expect(data.result.id).to.be.greaterThan(0);
+				// });
+
+				// cy.task('backendApiPut', {
+				// 	token: token,
+				// 	path:  '/api/settings/auth-methods',
+				// 	data:  {
+				// 		value: [
+				// 			'local',
+				// 			'oauth'
+				// 		]
+				// 	}
+				// }).then((data) => {
+				// 	cy.validateSwaggerSchema('put', 200, '/settings/{name}', data);
+				// 	expect(data.result).to.have.property('id');
+				// 	expect(data.result.id).to.be.greaterThan(0);
+				// });
+			});
+		});
+
+		it.skip('Should log in with OAuth', function() {
+			// cy.task('backendApiGet', {
+			// 	path:  '/oauth/login?redirect_base=' + encodeURI(Cypress.config('baseUrl')),
+			// }).then((data) => {
+			// 	expect(data).to.have.property('result');
+
+			// 	cy.origin('http://authentik:9000', {args: data.result}, (url) => {
+			// 		cy.visit(url);
+			// 		cy.get('ak-flow-executor')
+			// 		.shadow()
+			// 		.find('ak-stage-identification')
+			// 		.shadow()
+			// 		.find('input[name="uidField"]', { visible: true })
+			// 		.type('cypress');
+
+			// 	cy.get('ak-flow-executor')
+			// 		.shadow()
+			// 		.find('ak-stage-identification')
+			// 		.shadow()
+			// 		.find('button[type="submit"]', { visible: true })
+			// 		.click();
+
+			// 	cy.get('ak-flow-executor')
+			// 		.shadow()
+			// 		.find('ak-stage-password')
+			// 		.shadow()
+			// 		.find('input[name="password"]', { visible: true })
+			// 		.type('fqXBfUYqHvYqiwBHWW7f');
+
+			// 	cy.get('ak-flow-executor')
+			// 		.shadow()
+			// 		.find('ak-stage-password')
+			// 		.shadow()
+			// 		.find('button[type="submit"]', { visible: true })
+			// 		.click();
+			// 	})
+
+			// 	// we should be logged in
+			// 	cy.get('#root p.chakra-text')
+			// 		.first()
+			// 		.should('have.text', 'Nginx Proxy Manager');
+
+			// 	// logout:
+			// 	cy.clearLocalStorage();
+			// });
+		});
+	}
+});