Major update to cypress

- Updated cypress
- Ground work for testing DNS certs in CI
Jamie Curnow 1 year ago
parent
commit
6ac9a82279

+ 4 - 0
.gitignore

@@ -3,3 +3,7 @@
 ._*
 .vscode
 certbot-help.txt
+test/node_modules
+*/node_modules
+docker/dev/dnsrouter-config.json.tmp
+docker/dev/resolv.conf

+ 52 - 68
Jenkinsfile

@@ -18,10 +18,8 @@ pipeline {
 		BUILD_VERSION              = getVersion()
 		MAJOR_VERSION              = '2'
 		BRANCH_LOWER               = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
-		COMPOSE_PROJECT_NAME       = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
-		COMPOSE_FILE               = 'docker/docker-compose.ci.yml'
+		BUILDX_NAME                = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
 		COMPOSE_INTERACTIVE_NO_CLI = 1
-		BUILDX_NAME                = "${COMPOSE_PROJECT_NAME}"
 	}
 	stages {
 		stage('Environment') {
@@ -94,75 +92,61 @@ pipeline {
 						}
 					}
 				}
-				stage('Cypress') {
-					steps {
-						// Creating will also create the network prior to
-						// using it in parallel stages below and mitigating
-						// a race condition.
-						sh 'docker-compose build cypress-sqlite'
-						sh 'docker-compose build cypress-mysql'
-						sh 'docker-compose create cypress-sqlite'
-						sh 'docker-compose create cypress-mysql'
-					}
+			}
+		}
+		stage('Test Sqlite') {
+			environment {
+				COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
+				COMPOSE_FILE         = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
+			}
+			when {
+				not {
+					equals expected: 'UNSTABLE', actual: currentBuild.result
+				}
+			}
+			steps {
+				sh 'rm -rf ./test/results/junit/*'
+				sh './scripts/ci/fulltest-cypress'
+			}
+			post {
+				always {
+					// Dumps to analyze later
+					sh 'mkdir -p debug/sqlite'
+					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
+					junit 'test/results/junit/*'
+					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
 				}
 			}
 		}
-		stage('Integration Tests') {
-			parallel {
-				stage('Sqlite') {
-					steps {
-						// Bring up a stack
-						sh 'docker-compose up -d fullstack-sqlite'
-						sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-sqlite) 120'
-						// Stop and Start it, as this will test it's ability to restart with existing data
-						sh 'docker-compose stop fullstack-sqlite'
-						sh 'docker-compose start fullstack-sqlite'
-						sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-sqlite) 120'
-
-						// Run tests
-						sh 'rm -rf test/results-sqlite'
-						sh 'docker-compose up cypress-sqlite'
-						// Get results
-						sh 'docker cp -L "$(docker-compose ps --all -q cypress-sqlite):/test/results" test/results-sqlite'
-					}
-					post {
-						always {
-							// Dumps to analyze later
-							sh 'mkdir -p debug/sqlite'
-							sh 'docker-compose logs fullstack-sqlite > debug/sqlite/docker_fullstack_sqlite.log'
-							// Cypress videos and screenshot artifacts
-							dir(path: 'test/results-sqlite') {
-								archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
-							}
-							junit 'test/results-sqlite/junit/*'
-						}
-					}
+		stage('Test Mysql') {
+			environment {
+				COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
+				COMPOSE_FILE         = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
+			}
+			when {
+				not {
+					equals expected: 'UNSTABLE', actual: currentBuild.result
 				}
-				stage('Mysql') {
-					steps {
-						// Bring up a stack
-						sh 'docker-compose up -d fullstack-mysql'
-						sh './scripts/wait-healthy $(docker-compose ps --all -q fullstack-mysql) 120'
-
-						// Run tests
-						sh 'rm -rf test/results-mysql'
-						sh 'docker-compose up cypress-mysql'
-						// Get results
-						sh 'docker cp -L "$(docker-compose ps --all -q cypress-mysql):/test/results" test/results-mysql'
-					}
-					post {
-						always {
-							// Dumps to analyze later
-							sh 'mkdir -p debug/mysql'
-							sh 'docker-compose logs fullstack-mysql > debug/mysql/docker_fullstack_mysql.log'
-							sh 'docker-compose logs db > debug/mysql/docker_db.log'
-							// Cypress videos and screenshot artifacts
-							dir(path: 'test/results-mysql') {
-								archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
-							}
-							junit 'test/results-mysql/junit/*'
-						}
-					}
+			}
+			steps {
+				sh 'rm -rf ./test/results/junit/*'
+				sh './scripts/ci/fulltest-cypress'
+			}
+			post {
+				always {
+					// Dumps to analyze later
+					sh 'mkdir -p debug/mysql'
+					sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
+					sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
+					junit 'test/results/junit/*'
+					sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
 				}
 			}
 		}
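The colon-separated COMPOSE_FILE value in each stage layers a database-specific override on top of the shared CI compose file; docker-compose treats the variable like a series of -f flags. A rough sketch of what the Sqlite stage resolves to, for local inspection only:

    COMPOSE_FILE=docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml docker-compose config
    # equivalent to:
    docker-compose -f docker/docker-compose.ci.yml -f docker/docker-compose.ci.sqlite.yml config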

+ 28 - 0
docker/dev/dnsrouter-config.json

@@ -0,0 +1,28 @@
+{
+    "log": {
+        "format": "nice",
+        "level": "debug"
+    },
+    "servers": [
+        {
+            "host": "0.0.0.0",
+            "port": 53,
+            "upstreams": [
+                {
+                    "regex": "website[0-9]+.example\\.com",
+                    "upstream": "127.0.0.11"
+                },
+                {
+                    "regex": ".*\\.example\\.com",
+                    "upstream": "1.1.1.1"
+                },
+                {
+                    "regex": "local",
+                    "nxdomain": true
+                }
+            ],
+            "internal": null,
+            "default_upstream": "127.0.0.11"
+        }
+    ]
+}
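This routing config sends website[0-9]+.example.com lookups to Docker's embedded DNS (127.0.0.11, which resolves the network aliases given to the fullstack container), forwards all other example.com names upstream (the 1.1.1.1 placeholder is rewritten to the real PowerDNS container IP by scripts/ci/fulltest-cypress), and returns NXDOMAIN for bare "local" names. A quick sanity check from a container on the same network, assuming dig is available and <dnsrouter-ip> is the router container's address:

    dig +short @<dnsrouter-ip> website1.example.com   # resolved by Docker's embedded DNS
    dig +short @<dnsrouter-ip> somehost.example.com   # forwarded to PowerDNS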

+ 7 - 0
docker/dev/letsencrypt.ini

@@ -0,0 +1,7 @@
+text = True
+non-interactive = True
+webroot-path = /data/letsencrypt-acme-challenge
+key-type = ecdsa
+elliptic-curve = secp384r1
+preferred-chain = ISRG Root X1
+server =
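Each key here is a certbot CLI default written in its long-option form; server is deliberately left empty and is presumably pointed at the test ACME endpoint at runtime. A hedged sketch of how such a file is consumed (this exact command is not part of the commit):

    certbot certonly --config /etc/letsencrypt.ini -d website1.example.com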

+ 255 - 0
docker/dev/pdns-db.sql

@@ -0,0 +1,255 @@
+/*
+
+How this was generated:
+1. bring up an empty pdns stack
+2. use api to create a zone ...
+
+curl -X POST \
+  'http://npm.dev:8081/api/v1/servers/localhost/zones' \
+  --header 'X-API-Key: npm' \
+  --header 'Content-Type: application/json' \
+  --data-raw '{
+  "name": "example.com.",
+  "kind": "Native",
+  "masters": [],
+  "nameservers": [
+    "ns1.pdns.",
+    "ns2.pdns."
+  ]
+}'
+
+3. Dump sql:
+
+docker exec -ti npm.pdns.db mysqldump -u pdns -p pdns
+
+*/
+
+----------------------------------------------------------------------
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8mb4 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `comments`
+--
+
+DROP TABLE IF EXISTS `comments`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `comments` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `domain_id` int(11) NOT NULL,
+  `name` varchar(255) NOT NULL,
+  `type` varchar(10) NOT NULL,
+  `modified_at` int(11) NOT NULL,
+  `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+  `comment` text CHARACTER SET utf8mb3 NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `comments_name_type_idx` (`name`,`type`),
+  KEY `comments_order_idx` (`domain_id`,`modified_at`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `comments`
+--
+
+LOCK TABLES `comments` WRITE;
+/*!40000 ALTER TABLE `comments` DISABLE KEYS */;
+/*!40000 ALTER TABLE `comments` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `cryptokeys`
+--
+
+DROP TABLE IF EXISTS `cryptokeys`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `cryptokeys` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `domain_id` int(11) NOT NULL,
+  `flags` int(11) NOT NULL,
+  `active` tinyint(1) DEFAULT NULL,
+  `published` tinyint(1) DEFAULT 1,
+  `content` text DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  KEY `domainidindex` (`domain_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `cryptokeys`
+--
+
+LOCK TABLES `cryptokeys` WRITE;
+/*!40000 ALTER TABLE `cryptokeys` DISABLE KEYS */;
+/*!40000 ALTER TABLE `cryptokeys` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `domainmetadata`
+--
+
+DROP TABLE IF EXISTS `domainmetadata`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `domainmetadata` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `domain_id` int(11) NOT NULL,
+  `kind` varchar(32) DEFAULT NULL,
+  `content` text DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  KEY `domainmetadata_idx` (`domain_id`,`kind`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `domainmetadata`
+--
+
+LOCK TABLES `domainmetadata` WRITE;
+/*!40000 ALTER TABLE `domainmetadata` DISABLE KEYS */;
+INSERT INTO `domainmetadata` VALUES
+(1,1,'SOA-EDIT-API','DEFAULT');
+/*!40000 ALTER TABLE `domainmetadata` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `domains`
+--
+
+DROP TABLE IF EXISTS `domains`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `domains` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `name` varchar(255) NOT NULL,
+  `master` varchar(128) DEFAULT NULL,
+  `last_check` int(11) DEFAULT NULL,
+  `type` varchar(8) NOT NULL,
+  `notified_serial` int(10) unsigned DEFAULT NULL,
+  `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+  `options` varchar(64000) DEFAULT NULL,
+  `catalog` varchar(255) DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `name_index` (`name`),
+  KEY `catalog_idx` (`catalog`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `domains`
+--
+
+LOCK TABLES `domains` WRITE;
+/*!40000 ALTER TABLE `domains` DISABLE KEYS */;
+INSERT INTO `domains` VALUES
+(1,'example.com','',NULL,'NATIVE',NULL,'',NULL,NULL);
+/*!40000 ALTER TABLE `domains` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `records`
+--
+
+DROP TABLE IF EXISTS `records`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `records` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `domain_id` int(11) DEFAULT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `type` varchar(10) DEFAULT NULL,
+  `content` varchar(64000) DEFAULT NULL,
+  `ttl` int(11) DEFAULT NULL,
+  `prio` int(11) DEFAULT NULL,
+  `disabled` tinyint(1) DEFAULT 0,
+  `ordername` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+  `auth` tinyint(1) DEFAULT 1,
+  PRIMARY KEY (`id`),
+  KEY `nametype_index` (`name`,`type`),
+  KEY `domain_id` (`domain_id`),
+  KEY `ordername` (`ordername`)
+) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `records`
+--
+
+LOCK TABLES `records` WRITE;
+/*!40000 ALTER TABLE `records` DISABLE KEYS */;
+INSERT INTO `records` VALUES
+(1,1,'example.com','NS','ns1.pdns',1500,0,0,NULL,1),
+(2,1,'example.com','NS','ns2.pdns',1500,0,0,NULL,1),
+(3,1,'example.com','SOA','a.misconfigured.dns.server.invalid hostmaster.example.com 2023030501 10800 3600 604800 3600',1500,0,0,NULL,1);
+/*!40000 ALTER TABLE `records` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `supermasters`
+--
+
+DROP TABLE IF EXISTS `supermasters`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `supermasters` (
+  `ip` varchar(64) NOT NULL,
+  `nameserver` varchar(255) NOT NULL,
+  `account` varchar(40) CHARACTER SET utf8mb3 NOT NULL,
+  PRIMARY KEY (`ip`,`nameserver`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `supermasters`
+--
+
+LOCK TABLES `supermasters` WRITE;
+/*!40000 ALTER TABLE `supermasters` DISABLE KEYS */;
+/*!40000 ALTER TABLE `supermasters` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tsigkeys`
+--
+
+DROP TABLE IF EXISTS `tsigkeys`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tsigkeys` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `name` varchar(255) DEFAULT NULL,
+  `algorithm` varchar(50) DEFAULT NULL,
+  `secret` varchar(255) DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `namealgoindex` (`name`,`algorithm`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tsigkeys`
+--
+
+LOCK TABLES `tsigkeys` WRITE;
+/*!40000 ALTER TABLE `tsigkeys` DISABLE KEYS */;
+/*!40000 ALTER TABLE `tsigkeys` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

+ 12 - 0
docker/dev/pebble-config.json

@@ -0,0 +1,12 @@
+{
+	"pebble": {
+		"listenAddress": "0.0.0.0:443",
+		"managementListenAddress": "0.0.0.0:15000",
+		"certificate": "test/certs/localhost/cert.pem",
+		"privateKey": "test/certs/localhost/key.pem",
+		"httpPort": 80,
+		"tlsPort": 443,
+		"ocspResponderURL": "",
+		"externalAccountBindingRequired": false
+	}
+}

+ 25 - 0
docker/docker-compose.ci.mysql.yml

@@ -0,0 +1,25 @@
+# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
+services:
+
+  fullstack:
+    environment:
+      DB_MYSQL_HOST: 'db-mysql'
+      DB_MYSQL_PORT: '3306'
+      DB_MYSQL_USER: 'npm'
+      DB_MYSQL_PASSWORD: 'npmpass'
+      DB_MYSQL_NAME: 'npm'
+    depends_on:
+      - db-mysql
+
+  db-mysql:
+    image: jc21/mariadb-aria
+    environment:
+      MYSQL_ROOT_PASSWORD: 'npm'
+      MYSQL_DATABASE: 'npm'
+      MYSQL_USER: 'npm'
+      MYSQL_PASSWORD: 'npmpass'
+    volumes:
+      - mysql_vol:/var/lib/mysql
+
+volumes:
+  mysql_vol:

+ 9 - 0
docker/docker-compose.ci.sqlite.yml

@@ -0,0 +1,9 @@
+# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
+services:
+
+  fullstack:
+    environment:
+      DB_SQLITE_FILE: '/data/mydb.sqlite'
+      PUID: 1000
+      PGID: 1000
+      DISABLE_IPV6: 'true'

+ 76 - 67
docker/docker-compose.ci.yml

@@ -1,91 +1,100 @@
-# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
-version: '3.8'
+# WARNING: This is a CI docker-compose file used for building
+# and testing of the entire app, it should not be used for production.
+# This is a base compose file, it should be extended with a
+# docker-compose.ci.*.yml file
 services:
 
-  fullstack-mysql:
-    image: "${IMAGE}:ci-${BUILD_NUMBER}"
+  fullstack:
+    image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
     environment:
       DEBUG: 'true'
-      LE_STAGING: 'true'
       FORCE_COLOR: 1
-      DB_MYSQL_HOST: 'db'
-      DB_MYSQL_PORT: '3306'
-      DB_MYSQL_USER: 'npm'
-      DB_MYSQL_PASSWORD: 'npm'
-      DB_MYSQL_NAME: 'npm'
     volumes:
-      - npm_data_mysql:/data
-      - npm_le_mysql:/etc/letsencrypt
-    expose:
-      - 81
-      - 80
-      - 443
-    depends_on:
-      - db
+      - 'npm_data_ci:/data'
+      - 'npm_le_ci:/etc/letsencrypt'
+      - './dev/letsencrypt.ini:/etc/letsencrypt.ini:ro'
+      - './dev/resolv.conf:/etc/resolv.conf:ro'
+      - '/etc/localtime:/etc/localtime:ro'
     healthcheck:
       test: ["CMD", "/usr/bin/check-health"]
       interval: 10s
       timeout: 3s
+    networks:
+      default:
+        aliases:
+          - website1.example.com
+          - website2.example.com
+          - website3.example.com
 
-  fullstack-sqlite:
-    image: "${IMAGE}:ci-${BUILD_NUMBER}"
-    environment:
-      DEBUG: 'true'
-      LE_STAGING: 'true'
-      FORCE_COLOR: 1
-      DB_SQLITE_FILE: '/data/mydb.sqlite'
-      PUID: 1000
-      PGID: 1000
-      DISABLE_IPV6: 'true'
+  stepca:
+    image: jc21/testca
     volumes:
-      - npm_data_sqlite:/data
-      - npm_le_sqlite:/etc/letsencrypt
-    expose:
-      - 81
-      - 80
-      - 443
-    healthcheck:
-      test: ["CMD", "/usr/bin/check-health"]
-      interval: 10s
-      timeout: 3s
+      - './dev/resolv.conf:/etc/resolv.conf:ro'
+      - '/etc/localtime:/etc/localtime:ro'
+    networks:
+      default:
+        aliases:
+          - ca.internal
 
-  db:
-    image: jc21/mariadb-aria
-    environment:
-      MYSQL_ROOT_PASSWORD: 'npm'
-      MYSQL_DATABASE: 'npm'
-      MYSQL_USER: 'npm'
-      MYSQL_PASSWORD: 'npm'
+  pdns:
+    image: pschiffe/pdns-mysql
     volumes:
-      - mysql_data:/var/lib/mysql
+      - '/etc/localtime:/etc/localtime:ro'
+    environment:
+      PDNS_master: 'yes'
+      PDNS_api: 'yes'
+      PDNS_api_key: 'npm'
+      PDNS_webserver: 'yes'
+      PDNS_webserver_address: '0.0.0.0'
+      PDNS_webserver_password: 'npm'
+      PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+      PDNS_version_string: 'anonymous'
+      PDNS_default_ttl: 1500
+      PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+      PDNS_gmysql_host: pdns-db
+      PDNS_gmysql_port: 3306
+      PDNS_gmysql_user: pdns
+      PDNS_gmysql_password: pdns
+      PDNS_gmysql_dbname: pdns
+    depends_on:
+      - pdns-db
+    networks:
+      default:
+        aliases:
+          - ns1.pdns
+          - ns2.pdns
 
-  cypress-mysql:
-    image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
-    build:
-      context: ../test/
-      dockerfile: cypress/Dockerfile
+  pdns-db:
+    image: mariadb
     environment:
-      CYPRESS_baseUrl: 'http://fullstack-mysql:81'
+      MYSQL_ROOT_PASSWORD: 'pdns'
+      MYSQL_DATABASE: 'pdns'
+      MYSQL_USER: 'pdns'
+      MYSQL_PASSWORD: 'pdns'
+    volumes:
+      - 'pdns_mysql_vol:/var/lib/mysql'
+      - '/etc/localtime:/etc/localtime:ro'
+      - './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
+
+  dnsrouter:
+    image: jc21/dnsrouter
     volumes:
-      - cypress_logs_mysql:/results
-    command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
+      - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
 
-  cypress-sqlite:
+  cypress:
     image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
     build:
-      context: ../test/
-      dockerfile: cypress/Dockerfile
+      context: ../
+      dockerfile: test/cypress/Dockerfile
     environment:
-      CYPRESS_baseUrl: "http://fullstack-sqlite:81"
+      CYPRESS_baseUrl: 'http://fullstack:81'
     volumes:
-      - cypress_logs_sqlite:/results
-    command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
+      - 'cypress_logs:/results'
+      - './dev/resolv.conf:/etc/resolv.conf:ro'
+    command: cypress run --browser chrome --config-file=cypress/config/ci.js
 
 volumes:
-  cypress_logs_mysql:
-  cypress_logs_sqlite:
-  npm_data_mysql:
-  npm_data_sqlite:
-  npm_le_sqlite:
-  npm_le_mysql:
-  mysql_data:
+  cypress_logs:
+  npm_data_ci:
+  npm_le_ci:
+  pdns_mysql_vol:

+ 0 - 1
docker/docker-compose.dev.yml

@@ -1,5 +1,4 @@
 # WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
-version: '3.8'
 services:
 
   npm:

+ 1 - 1
scripts/ci/frontend-build

@@ -16,7 +16,7 @@ if hash docker 2>/dev/null; then
 		-e NODE_OPTIONS=--openssl-legacy-provider \
 		-v "$(pwd)/frontend:/app/frontend" \
 		-v "$(pwd)/global:/app/global" \
-		-w /app/frontend "$DOCKER_IMAGE" \
+		-w /app/frontend "${DOCKER_IMAGE}" \
 		sh -c "yarn install && yarn build && yarn build && chown -R $(id -u):$(id -g) /app/frontend"
 
 	echo -e "${BLUE}❯ ${GREEN}Building Frontend Complete${RESET}"

+ 94 - 0
scripts/ci/fulltest-cypress

@@ -0,0 +1,94 @@
+#!/bin/bash
+set -e
+
+STACK="${1:-sqlite}"
+
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+# remember this is running in "ci" folder..
+
+# Some defaults for running this script outside of CI
+export COMPOSE_PROJECT_NAME="${COMPOSE_PROJECT_NAME:-npm_local_fulltest}"
+export IMAGE="${IMAGE:-nginx-proxy-manager}"
+export BRANCH_LOWER="${BRANCH_LOWER:-unknown}"
+export BUILD_NUMBER="${BUILD_NUMBER:-0000}"
+
+if [ "${COMPOSE_FILE:-}" = "" ]; then
+	export COMPOSE_FILE="docker/docker-compose.ci.yml:docker/docker-compose.ci.${STACK}.yml"
+fi
+
+# Colors
+BLUE='\E[1;34m'
+CYAN='\E[1;36m'
+GREEN='\E[1;32m'
+RESET='\E[0m'
+YELLOW='\E[1;33m'
+
+export BLUE CYAN GREEN RESET YELLOW
+
+echo -e "${BLUE}❯ ${CYAN}Starting fullstack cypress testing ...${RESET}"
+
+NETWORK_NAME="${COMPOSE_PROJECT_NAME}_default"
+
+# $1: container_name
+get_container_ip () {
+	local container_name=$1
+	local container
+	local ip
+	container=$(docker-compose ps --all -q "${container_name}" | tail -n1)
+	ip=$(docker inspect -f "{{.NetworkSettings.Networks.${NETWORK_NAME}.IPAddress}}" "$container")
+	echo "$ip"
+}
+
+# $1: container_name
+get_container_aliases () {
+	local container_name=$1
+	local container
+	local ip
+	container=$(docker-compose ps --all -q "${container_name}" | tail -n1)
+	ip=$(docker inspect -f "{{.NetworkSettings.Networks.${NETWORK_NAME}.Aliases}}" "$container")
+	echo "$ip"
+}
+
+# Bring up a stack, in steps so we can inject IPs everywhere
+docker-compose up -d pdns pdns-db
+PDNS_IP=$(get_container_ip "pdns")
+echo -e "${BLUE}❯ ${YELLOW}PDNS IP is ${PDNS_IP}${RESET}"
+
+# adjust the dnsrouter config
+LOCAL_DNSROUTER_CONFIG="$DIR/../../docker/dev/dnsrouter-config.json"
+rm -rf "$LOCAL_DNSROUTER_CONFIG.tmp"
+# IMPORTANT: changes to dnsrouter-config.json will affect this line:
+jq --arg a "$PDNS_IP" '.servers[0].upstreams[1].upstream = $a' "$LOCAL_DNSROUTER_CONFIG" > "$LOCAL_DNSROUTER_CONFIG.tmp"
+
+docker-compose up -d dnsrouter
+DNSROUTER_IP=$(get_container_ip "dnsrouter")
+echo -e "${BLUE}❯ ${YELLOW}DNS Router IP is ${DNSROUTER_IP}"
+
+# mount the resolver
+LOCAL_RESOLVE="$DIR/../../docker/dev/resolv.conf"
+rm -rf "${LOCAL_RESOLVE}"
+printf "nameserver %s\noptions ndots:0" "${DNSROUTER_IP}" > "${LOCAL_RESOLVE}"
+
+# bring up all remaining containers, except cypress!
+docker-compose up -d --remove-orphans stepca
+docker-compose pull db-mysql || true # ok to fail
+docker-compose up -d --remove-orphans --pull=never fullstack
+
+# wait for main container to be healthy
+bash "$DIR/../wait-healthy" "$(docker-compose ps --all -q fullstack)" 120
+
+# Run tests
+rm -rf "$DIR/../../test/results"
+docker-compose up --build cypress
+
+# Get results
+docker cp -L "$(docker-compose ps --all -q cypress):/test/results" "$DIR/../../test/"
+docker cp -L "$(docker-compose ps --all -q fullstack):/data/logs" "$DIR/../../test/results/"
+
+if [ "$2" = "cleanup" ]; then
+	echo -e "${BLUE}❯ ${CYAN}Cleaning up containers ...${RESET}"
+	docker-compose down --remove-orphans --volumes -t 30
+fi
+
+echo -e "${BLUE}❯ ${GREEN}Fullstack cypress testing complete${RESET}"
+
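Thanks to the fallback defaults near the top, the script can also be driven outside Jenkins; a sketch assuming docker, docker-compose and jq are installed and the app image has already been built (for example via scripts/ci/test-and-build):

    ./scripts/ci/fulltest-cypress sqlite cleanup   # first arg picks the stack, optional second arg tears it down
    ./scripts/ci/fulltest-cypress mysql            # leaves the stack running for inspection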

+ 6 - 6
scripts/ci/test-and-build

@@ -3,8 +3,8 @@
 DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 . "$DIR/../.common.sh"
 
-DOCKER_IMAGE=nginxproxymanager/nginx-full:certbot-node
-docker pull "${DOCKER_IMAGE}"
+TESTING_IMAGE=nginxproxymanager/nginx-full:certbot-node
+docker pull "${TESTING_IMAGE}"
 
 # Test
 echo -e "${BLUE}❯ ${CYAN}Testing backend ...${RESET}"
@@ -12,20 +12,20 @@ docker run --rm \
 	-v "$(pwd)/backend:/app" \
 	-v "$(pwd)/global:/app/global" \
 	-w /app \
-	"${DOCKER_IMAGE}" \
+	"${TESTING_IMAGE}" \
 	sh -c 'yarn install && yarn eslint . && rm -rf node_modules'
 echo -e "${BLUE}❯ ${GREEN}Testing Complete${RESET}"
 
 # Build
 echo -e "${BLUE}❯ ${CYAN}Building ...${RESET}"
 docker build --pull --no-cache --compress \
-	-t "${IMAGE}:ci-${BUILD_NUMBER}" \
+	-t "${IMAGE:-nginx-proxy-manager}:${BRANCH_LOWER:-unknown}-ci-${BUILD_NUMBER:-0000}" \
 	-f docker/Dockerfile \
 	--progress=plain \
 	--build-arg TARGETPLATFORM=linux/amd64 \
 	--build-arg BUILDPLATFORM=linux/amd64 \
-	--build-arg BUILD_VERSION="${BUILD_VERSION}" \
-	--build-arg BUILD_COMMIT="${BUILD_COMMIT}" \
+	--build-arg BUILD_VERSION="${BUILD_VERSION:-unknown}" \
+	--build-arg BUILD_COMMIT="${BUILD_COMMIT:-unknown}" \
 	--build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \
 	.
 echo -e "${BLUE}❯ ${GREEN}Building Complete${RESET}"

+ 1 - 2
scripts/wait-healthy

@@ -23,9 +23,8 @@ until [ "${HEALTHY}" = "healthy" ]; do
 	((LOOPCOUNT++))
 
 	if [ "$LOOPCOUNT" == "$LIMIT" ]; then
-		echo ""
-		echo ""
 		echo -e "${BLUE}❯ ${RED}Timed out waiting for healthy${RESET}"
+		docker logs --tail 50 "$SERVICE"
 		exit 1
 	fi
 done

+ 0 - 1
test/.dockerignore

@@ -1 +0,0 @@
-node_modules

+ 1 - 1
test/.eslintrc.json

@@ -73,4 +73,4 @@
 			}
 		]
 	}
-}
+}

+ 2 - 4
test/.gitignore

@@ -1,4 +1,2 @@
-.vscode
-node_modules
-results
-cypress/videos
+results/*
+cypress/results/*

+ 6 - 7
test/cypress/Dockerfile

@@ -1,13 +1,12 @@
-FROM cypress/included:9.4.1
+FROM cypress/included:13.9.0
 
-COPY --chown=1000 ./ /test
+COPY --chown=1000 ./test /test
 
-# mkcert
-ENV MKCERT=1.4.2
-RUN wget -O /usr/bin/mkcert "https://github.com/FiloSottile/mkcert/releases/download/v${MKCERT}/mkcert-v${MKCERT}-linux-amd64" \
-	&& chmod +x /usr/bin/mkcert
+# Disable Cypress CLI colors
+ENV FORCE_COLOR=0
+ENV NO_COLOR=1
 
 WORKDIR /test
-RUN yarn install
+RUN yarn install && yarn cache clean
 ENTRYPOINT []
 CMD ["cypress", "run"]

+ 22 - 0
test/cypress/config/ci.js

@@ -0,0 +1,22 @@
+const { defineConfig } = require('cypress');
+
+module.exports = defineConfig({
+	requestTimeout: 30000,
+	defaultCommandTimeout: 20000,
+	reporter: 'cypress-multi-reporters',
+	reporterOptions: {
+		configFile: 'multi-reporter.json'
+	},
+	video: true,
+	videosFolder: 'results/videos',
+	screenshotsFolder: 'results/screenshots',
+	e2e: {
+		setupNodeEvents(on, config) {
+			return require("../plugins/index.js")(on, config);
+		},
+		env: {
+			swaggerBase: '{{baseUrl}}/api/schema',
+		},
+		baseUrl: 'http://localhost:1234',
+	}
+});

+ 0 - 14
test/cypress/config/ci.json

@@ -1,14 +0,0 @@
-{
-	"requestTimeout": 30000,
-	"defaultCommandTimeout": 20000,
-	"reporter": "cypress-multi-reporters",
-	"reporterOptions": {
-		"configFile": "multi-reporter.json"
-	},
-	"videosFolder": "results/videos",
-	"screenshotsFolder": "results/screenshots",
-	"env": {
-		"swaggerBase": "{{baseUrl}}/api/schema",
-		"RETRIES": 4
-	}
-}

+ 22 - 0
test/cypress/config/dev.js

@@ -0,0 +1,22 @@
+const { defineConfig } = require('cypress');
+
+module.exports = defineConfig({
+	requestTimeout: 30000,
+	defaultCommandTimeout: 20000,
+	reporter: 'cypress-multi-reporters',
+	reporterOptions: {
+		configFile: 'multi-reporter.json'
+	},
+	video: false,
+	videosFolder: 'results/videos',
+	screenshotsFolder: 'results/screenshots',
+	e2e: {
+		setupNodeEvents(on, config) {
+			return require("../plugins/index.js")(on, config);
+		},
+		env: {
+			swaggerBase: '{{baseUrl}}/api/schema',
+		},
+		baseUrl: 'http://localhost:1234',
+	}
+});

+ 0 - 14
test/cypress/config/dev.json

@@ -1,14 +0,0 @@
-{
-	"requestTimeout": 30000,
-	"defaultCommandTimeout": 20000,
-	"reporter": "cypress-multi-reporters",
-	"reporterOptions": {
-		"configFile": "multi-reporter.json"
-	},
-	"videos": false,
-	"screenshotsFolder": "results/screenshots",
-	"env": {
-		"swaggerBase": "{{baseUrl}}/api/schema",
-		"RETRIES": 0
-	}
-}

+ 0 - 0
test/cypress/integration/api/Health.spec.js → test/cypress/e2e/api/Health.cy.js


+ 0 - 0
test/cypress/integration/api/Hosts.spec.js → test/cypress/e2e/api/Hosts.cy.js


+ 0 - 0
test/cypress/integration/api/Users.spec.js → test/cypress/e2e/api/Users.cy.js


+ 0 - 5
test/cypress/fixtures/example.json

@@ -1,5 +0,0 @@
-{
-  "name": "Using fixtures to represent data",
-  "email": "[email protected]",
-  "body": "Fixtures are a great way to mock data for responses to routes"
-}

+ 8 - 4
test/cypress/plugins/backendApi/logger.js

@@ -1,8 +1,12 @@
-const _     = require('lodash');
-const chalk = require('chalk');
+const _ = require("lodash");
+const chalk = require("chalk");
 
-module.exports = function () {
+module.exports = function() {
 	var arr = _.values(arguments);
-	arr.unshift(chalk.blue.bold('[') + chalk.yellow.bold('Backend API') + chalk.blue.bold(']'));
+	arr.unshift(
+		chalk.blue.bold("[") +
+			chalk.yellow.bold("Backend API") +
+			chalk.blue.bold("]"),
+	);
 	console.log.apply(null, arr);
 };

+ 31 - 3
test/cypress/support/commands.js

@@ -9,20 +9,32 @@
 // ***********************************************
 //
 
+import 'cypress-wait-until';
+
+Cypress.Commands.add('randomString', (length) => {
+	var result           = '';
+	var characters       = 'ABCDEFGHIJK LMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+	var charactersLength = characters.length;
+	for (var i = 0; i < length; i++) {
+		result += characters.charAt(Math.floor(Math.random() * charactersLength));
+	}
+	return result;
+});
+
 /**
  * Check the swagger schema:
  *
  * @param {string}  method        API Method in swagger doc, "get", "put", "post", "delete"
- * @param {number}  statusCode    API status code in swagger doc
+ * @param {integer} code          Swagger doc endpoint response code, exactly as defined in swagger doc
  * @param {string}  path          Swagger doc endpoint path, exactly as defined in swagger doc
  * @param {*}       data          The API response data to check against the swagger schema
  */
-Cypress.Commands.add('validateSwaggerSchema', (method, statusCode, path, data) => {
+Cypress.Commands.add('validateSwaggerSchema', (method, code, path, data) => {
 	cy.task('validateSwaggerSchema', {
 		file:           Cypress.env('swaggerBase'),
 		endpoint:       path,
 		method:         method,
-		statusCode:     statusCode,
+		statusCode:     code,
 		responseSchema: data,
 		verbose:        true
 	}).should('equal', null);
@@ -40,3 +52,19 @@ Cypress.Commands.add('getToken', () => {
 		cy.wrap(res.token);
 	});
 });
+
+// TODO: copied from v3, is this usable?
+Cypress.Commands.add('waitForCertificateStatus', (token, certID, expected, timeout = 60) => {
+	cy.log(`Waiting for certificate (${certID}) status (${expected}) timeout (${timeout})`);
+
+	cy.waitUntil(() => cy.task('backendApiGet', {
+		token: token,
+		path:  `/api/certificates/${certID}`
+	}).then((data) => {
+		return data.result.status === expected;
+	}), {
+		errorMsg: 'Waiting for certificate status failed',
+		timeout:  timeout * 1000,
+		interval: 5000
+	});
+});

+ 0 - 2
test/cypress/support/index.js → test/cypress/support/e2e.js

@@ -1,5 +1,3 @@
-require('cypress-plugin-retries');
-
 import './commands';
 
 Cypress.on('uncaught:exception', (/*err, runnable*/) => {

+ 1 - 1
test/jsconfig.json

@@ -3,4 +3,4 @@
 		"./node_modules/cypress",
 		"cypress/**/*.js"
 	]
-}
+}

+ 10 - 10
test/package.json

@@ -4,19 +4,19 @@
 	"description": "",
 	"main": "index.js",
 	"dependencies": {
-		"@jc21/cypress-swagger-validation": "^0.0.9",
+		"@jc21/cypress-swagger-validation": "^0.2.6",
 		"@jc21/restler": "^3.4.0",
 		"chalk": "^4.1.0",
-		"cypress": "^9.4.1",
-		"cypress-multi-reporters": "^1.4.0",
-		"cypress-plugin-retries": "^1.5.2",
-		"eslint": "^7.6.0",
+		"cypress": "^13.9.0",
+		"cypress-multi-reporters": "^1.6.4",
+		"cypress-wait-until": "^3.0.1",
+		"eslint": "^9.3.0",
 		"eslint-plugin-align-assignments": "^1.1.2",
-		"eslint-plugin-chai-friendly": "^0.6.0",
-		"eslint-plugin-cypress": "^2.11.1",
-		"lodash": "^4.17.19",
-		"mocha": "^8.1.1",
-		"mocha-junit-reporter": "^2.0.0"
+		"eslint-plugin-chai-friendly": "^0.7.4",
+		"eslint-plugin-cypress": "^3.2.0",
+		"lodash": "^4.17.21",
+		"mocha": "^10.4.0",
+		"mocha-junit-reporter": "^2.2.1"
 	},
 	"scripts": {
 		"cypress": "cypress open --config-file=cypress/config/dev.json --config baseUrl=${BASE_URL:-http://127.0.0.1:3081}",

File diff suppressed because it is too large
+ 327 - 348
test/yarn.lock


Some files were not shown because too many files changed in this diff