
Optimizations and build speedup (#6339)

* Optimizations and build speedup

With this commit I have changed several components to be more efficient.
This means fewer llvm-lines generated or fewer `clone()` calls.

 ### Config
- Re-ordered the `make_config` macro to be more efficient
- Created a custom Deserializer for `ConfigBuilder`, which results in less code and is more efficient
- Use structs for the `prepare_json` function instead of generating a custom JSON object.
  This generates less code and is more efficient (see the sketch after this list).
- Updated the `get_support_string` function to handle the masking differently.
  This generates less code and also made it possible to remove some sub-macro calls
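
As a rough illustration of the structs-over-ad-hoc-JSON point (a minimal serde sketch with hypothetical field names, not the actual Vaultwarden config types):

```rust
use serde::Serialize;
use serde_json::json;

// Hypothetical example type, not the real `prepare_json` data.
#[derive(Serialize)]
struct SupportInfo<'a> {
    domain: &'a str,
    signups_allowed: bool,
}

fn main() -> Result<(), serde_json::Error> {
    // Building an ad-hoc Value tree expands into more generated code per call site...
    let dynamic = json!({ "domain": "example.com", "signups_allowed": false });

    // ...while serializing a plain struct keeps the shape in one place
    // and goes through a single derive-generated code path.
    let typed = serde_json::to_value(SupportInfo {
        domain: "example.com",
        signups_allowed: false,
    })?;

    assert_eq!(dynamic, typed);
    Ok(())
}
```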

 ### Error
- Added an extra `new` call to prevent duplicate Strings in generated macro code.
  This generates fewer llvm-lines and seems to be more efficient.
- Created a custom Serializer for `ApiError` and `CompactApiError`.
  This makes those structs smaller in size, which is better for memory, and also produces fewer llvm-lines (see the sketch below).
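
A minimal sketch of a hand-written `Serialize` impl, using a hypothetical `CompactError` type rather than the real `ApiError`/`CompactApiError` definitions:

```rust
use serde::ser::{Serialize, SerializeStruct, Serializer};

// Hypothetical error type, only to illustrate the idea.
struct CompactError {
    message: &'static str,
    code: u16,
}

impl Serialize for CompactError {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // A manual impl emits exactly the fields the clients need,
        // instead of the larger derive-generated code for every field.
        let mut s = serializer.serialize_struct("CompactError", 2)?;
        s.serialize_field("message", self.message)?;
        s.serialize_field("code", &self.code)?;
        s.end()
    }
}
```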

 ### General
- Removed `once_cell` and replaced it all with Rust's std `LazyLock` (see the sketch after this list)
- Added and fixed some Clippy lints, which for example reduced `clone()` calls.
- Updated build profiles for more efficiency.
  Also added a new profile specifically for CI, which should decrease build-check times
- Updated several GitHub Workflows for better security and use the new `ci` build profile
- Updated to Rust v1.90.0, which uses the new `rust-lld` linker and should help with faster builds
- Updated the Cargo.toml for all crates to better use the `workspace` variables
- Added a `typos` Workflow and Pre-Commit hook, which should help in detecting spelling errors.
  Also fixed a few found by it.
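
The `once_cell` → `LazyLock` migration follows the pattern visible in the `src/api/icons.rs` diff below, for example:

```rust
// Before: extra dependency
// use once_cell::sync::Lazy;
// static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());

// After: std only (stable since Rust 1.80)
use std::sync::LazyLock;

use regex::Regex;

static ICON_SIZE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
```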

Signed-off-by: BlackDex <[email protected]>

* Fix release profile

Signed-off-by: BlackDex <[email protected]>

* Update typos and remove mimalloc check from pre-commit checks

Signed-off-by: BlackDex <[email protected]>

* Misc fixes and updated typos

Signed-off-by: BlackDex <[email protected]>

* Update crates and workflows

Signed-off-by: BlackDex <[email protected]>

* Fix formatting and pre-commit

Signed-off-by: BlackDex <[email protected]>

* Update to Rust v1.91 and update crates

Signed-off-by: BlackDex <[email protected]>

* Update web-vault to v2025.10.1 and xx to v1.8.0

Signed-off-by: BlackDex <[email protected]>

---------

Signed-off-by: BlackDex <[email protected]>
Mathijs van Veluw · 2 days ago · commit 9017ca265a

+ 28 - 13
.github/workflows/build.yml

@@ -14,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+      - "macros/**"
 
   pull_request:
     paths:
@@ -27,13 +28,11 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+      - "macros/**"
 
 jobs:
   build:
     name: Build and Test ${{ matrix.channel }}
-    permissions:
-      actions: write
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
@@ -81,7 +80,7 @@ jobs:
 
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -91,7 +90,7 @@ jobs:
 
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -121,43 +120,55 @@ jobs:
           # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
           # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
           # Only update when really needed! Use a <year>.<month>[.<inc>] format.
-          prefix-key: "v2023.07-rust"
+          prefix-key: "v2025.09-rust"
       # End Enable Rust Caching
 
       # Run cargo tests
       # First test all features together, afterwards test them separately.
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
+        id: test_sqlite_mysql_postgresql_mimalloc_s3
+        if: ${{ !cancelled() }}
+        run: |
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
+
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
+        id: test_sqlite_mysql_postgresql_mimalloc
+        if: ${{ !cancelled() }}
+        run: |
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc
+
       - name: "test features: sqlite,mysql,postgresql"
         id: test_sqlite_mysql_postgresql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite,mysql,postgresql
+          cargo test --profile ci --features sqlite,mysql,postgresql
 
       - name: "test features: sqlite"
         id: test_sqlite
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite
+          cargo test --profile ci --features sqlite
 
       - name: "test features: mysql"
         id: test_mysql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features mysql
+          cargo test --profile ci --features mysql
 
       - name: "test features: postgresql"
         id: test_postgresql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features postgresql
+          cargo test --profile ci --features postgresql
       # End Run cargo tests
 
 
       # Run cargo clippy, and fail on warnings
-      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
+      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
         id: clippy
         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
         run: |
-          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
+          cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
       # End Run cargo clippy
 
 
@@ -175,6 +186,8 @@ jobs:
       - name: "Some checks failed"
         if: ${{ failure() }}
         env:
+          TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
+          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
           TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
           TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
           TEST_MYSQL: ${{ steps.test_mysql.outcome }}
@@ -186,11 +199,13 @@ jobs:
           echo "" >> "${GITHUB_STEP_SUMMARY}"
           echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
-          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "" >> "${GITHUB_STEP_SUMMARY}"
           echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"

+ 0 - 2
.github/workflows/check-templates.yml

@@ -6,8 +6,6 @@ on: [ push, pull_request ]
 jobs:
   docker-templates:
     name: Validate docker templates
-    permissions:
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
 

+ 3 - 4
.github/workflows/hadolint.yml

@@ -1,13 +1,12 @@
 name: Hadolint
-permissions: {}
 
 on: [ push, pull_request ]
+permissions: {}
+
 
 jobs:
   hadolint:
     name: Validate Dockerfile syntax
-    permissions:
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
 
@@ -31,7 +30,7 @@ jobs:
           sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
           sudo chmod +x /usr/local/bin/hadolint
         env:
-          HADOLINT_VERSION: 2.13.1
+          HADOLINT_VERSION: 2.14.0
       # End Download hadolint
       # Checkout the repo
       - name: Checkout

+ 10 - 10
.github/workflows/release.yml

@@ -21,10 +21,10 @@ jobs:
     name: Build Vaultwarden containers
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     permissions:
-      packages: write
+      packages: write # Needed to upload packages and artifacts
       contents: read
-      attestations: write
-      id-token: write
+      attestations: write # Needed to generate an artifact attestation for a build
+      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
     runs-on: ubuntu-24.04
     timeout-minutes: 120
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
@@ -103,7 +103,7 @@ jobs:
 
       # Login to Docker Hub
       - name: Login to Docker Hub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -119,7 +119,7 @@ jobs:
 
       # Login to GitHub Container Registry
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -136,7 +136,7 @@ jobs:
 
       # Login to Quay.io
       - name: Login to Quay.io
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: quay.io
           username: ${{ secrets.QUAY_USERNAME }}
@@ -275,25 +275,25 @@ jobs:
 
       # Upload artifacts to Github Actions and Attest the binaries
       - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
           path: vaultwarden-amd64-${{ matrix.base_image }}
 
       - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
           path: vaultwarden-arm64-${{ matrix.base_image }}
 
       - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
           path: vaultwarden-armv7-${{ matrix.base_image }}
 
       - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
           path: vaultwarden-armv6-${{ matrix.base_image }}

+ 1 - 1
.github/workflows/releasecache-cleanup.yml

@@ -16,7 +16,7 @@ jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
     permissions:
-      packages: write
+      packages: write # To be able to cleanup old caches
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30

+ 2 - 4
.github/workflows/trivy.yml

@@ -23,9 +23,7 @@ jobs:
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Trivy Scan
     permissions:
-      contents: read
-      actions: read
-      security-events: write
+      security-events: write # To write the security report
     runs-on: ubuntu-24.04
     timeout-minutes: 30
 
@@ -48,6 +46,6 @@ jobs:
           severity: CRITICAL,HIGH
 
       - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
         with:
           sarif_file: 'trivy-results.sarif'

+ 22 - 0
.github/workflows/typos.yml

@@ -0,0 +1,22 @@
+name: Code Spell Checking
+
+on: [ push, pull_request ]
+permissions: {}
+
+jobs:
+  typos:
+    name: Run typos spell checking
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
+
+    steps:
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        with:
+          persist-credentials: false
+      # End Checkout the repo
+
+      # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
+      - name: Spell Check Repo
+        uses: crate-ci/typos@07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0

+ 1 - 1
.github/workflows/zizmor.yml

@@ -13,7 +13,7 @@ jobs:
     name: Run zizmor
     runs-on: ubuntu-latest
     permissions:
-      security-events: write
+      security-events: write # To write the security report
     steps:
       - name: Checkout repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0

+ 10 - 4
.pre-commit-config.yaml

@@ -1,7 +1,7 @@
 ---
 repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v6.0.0
+    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
     hooks:
     - id: check-yaml
     - id: check-json
@@ -22,14 +22,15 @@ repos:
       description: Format files with cargo fmt.
       entry: cargo fmt
       language: system
-      types: [rust]
+      always_run: true
+      pass_filenames: false
       args: ["--", "--check"]
     - id: cargo-test
       name: cargo test
       description: Test the package for errors.
       entry: cargo test
       language: system
-      args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
+      args: ["--features", "sqlite,mysql,postgresql", "--"]
       types_or: [rust, file]
       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
       pass_filenames: false
@@ -38,7 +39,7 @@ repos:
       description: Lint Rust sources
       entry: cargo clippy
       language: system
-      args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
+      args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
       types_or: [rust, file]
       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
       pass_filenames: false
@@ -50,3 +51,8 @@ repos:
       args:
         - "-c"
         - "cd docker && make"
+# When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
+- repo: https://github.com/crate-ci/typos
+  rev: 07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
+  hooks:
+    - id: typos

+ 26 - 0
.typos.toml

@@ -0,0 +1,26 @@
+[files]
+extend-exclude = [
+    ".git/",
+    "playwright/",
+    "*.js", # Ignore all JavaScript files
+    "!admin*.js", # Except our own JavaScript files
+]
+ignore-hidden = false
+
+[default]
+extend-ignore-re = [
+    # We use this in place of the reserved type identifier at some places
+    "typ",
+    # In SMTP it's called HELO, so ignore it
+    "(?i)helo_name",
+    "Server name sent during.+HELO",
+    # COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
+    "COSEKey",
+    "COSEAlgorithm",
+    # Ignore this specific string as it's valid
+    "Ensure they are valid OTPs",
+    # This word is misspelled upstream
+    # https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
+    # https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
+    "AuthRequestResponseRecieved",
+]

+ 79 - 81
Cargo.lock

@@ -33,9 +33,9 @@ dependencies = [
 
 [[package]]
 name = "aho-corasick"
-version = "1.1.3"
+version = "1.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
 dependencies = [
  "memchr",
 ]
@@ -76,6 +76,15 @@ version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
 
+[[package]]
+name = "ar_archive_writer"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a"
+dependencies = [
+ "object",
+]
+
 [[package]]
 name = "argon2"
 version = "0.5.3"
@@ -589,9 +598,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-runtime-api"
-version = "1.9.1"
+version = "1.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3683c5b152d2ad753607179ed71988e8cfd52964443b4f74fd8e552d0bbfeb46"
+checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193"
 dependencies = [
  "aws-smithy-async",
  "aws-smithy-types",
@@ -606,9 +615,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-types"
-version = "1.3.3"
+version = "1.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f5b3a7486f6690ba25952cabf1e7d75e34d69eaff5081904a47bc79074d6457"
+checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e"
 dependencies = [
  "base64-simd",
  "bytes",
@@ -2482,9 +2491,9 @@ dependencies = [
 
 [[package]]
 name = "icu_collections"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
 dependencies = [
  "displaydoc",
  "potential_utf",
@@ -2495,9 +2504,9 @@ dependencies = [
 
 [[package]]
 name = "icu_locale_core"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
 dependencies = [
  "displaydoc",
  "litemap",
@@ -2508,11 +2517,10 @@ dependencies = [
 
 [[package]]
 name = "icu_normalizer"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_normalizer_data",
  "icu_properties",
@@ -2523,42 +2531,38 @@ dependencies = [
 
 [[package]]
 name = "icu_normalizer_data"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
 
 [[package]]
 name = "icu_properties"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_locale_core",
  "icu_properties_data",
  "icu_provider",
- "potential_utf",
  "zerotrie",
  "zerovec",
 ]
 
 [[package]]
 name = "icu_properties_data"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
 
 [[package]]
 name = "icu_provider"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
 dependencies = [
  "displaydoc",
  "icu_locale_core",
- "stable_deref_trait",
- "tinystr",
  "writeable",
  "yoke",
  "zerofrom",
@@ -2715,9 +2719,9 @@ dependencies = [
 
 [[package]]
 name = "js-sys"
-version = "0.3.81"
+version = "0.3.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305"
+checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -2838,9 +2842,9 @@ checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
 
 [[package]]
 name = "litemap"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
 
 [[package]]
 name = "litrs"
@@ -3225,6 +3229,15 @@ dependencies = [
  "url",
 ]
 
+[[package]]
+name = "object"
+version = "0.32.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "oid-registry"
 version = "0.7.1"
@@ -3714,9 +3727,9 @@ checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
 
 [[package]]
 name = "potential_utf"
-version = "0.1.3"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
 dependencies = [
  "zerovec",
 ]
@@ -3786,10 +3799,11 @@ checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac"
 
 [[package]]
 name = "psm"
-version = "0.1.27"
+version = "0.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c"
+checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01"
 dependencies = [
+ "ar_archive_writer",
  "cc",
 ]
 
@@ -4427,7 +4441,7 @@ dependencies = [
  "once_cell",
  "ring",
  "rustls-pki-types",
- "rustls-webpki 0.103.7",
+ "rustls-webpki 0.103.8",
  "subtle",
  "zeroize",
 ]
@@ -4475,9 +4489,9 @@ dependencies = [
 
 [[package]]
 name = "rustls-webpki"
-version = "0.103.7"
+version = "0.103.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf"
+checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
 dependencies = [
  "ring",
  "rustls-pki-types",
@@ -5218,9 +5232,9 @@ dependencies = [
 
 [[package]]
 name = "tinystr"
-version = "0.8.1"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
+checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
 dependencies = [
  "displaydoc",
  "zerovec",
@@ -5597,9 +5611,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.20"
+version = "1.0.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "462eeb75aeb73aea900253ce739c8e18a67423fadf006037cd3ff27e82748a06"
+checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
 
 [[package]]
 name = "unicode-xid"
@@ -5710,7 +5724,6 @@ dependencies = [
  "mini-moka",
  "num-derive",
  "num-traits",
- "once_cell",
  "opendal",
  "openidconnect",
  "openssl",
@@ -5800,9 +5813,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.104"
+version = "0.2.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d"
+checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
 dependencies = [
  "cfg-if",
  "once_cell",
@@ -5811,25 +5824,11 @@ dependencies = [
  "wasm-bindgen-shared",
 ]
 
-[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.104"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19"
-dependencies = [
- "bumpalo",
- "log",
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-shared",
-]
-
 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.54"
+version = "0.4.55"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c"
+checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
 dependencies = [
  "cfg-if",
  "js-sys",
@@ -5840,9 +5839,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.104"
+version = "0.2.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119"
+checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -5850,22 +5849,22 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.104"
+version = "0.2.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7"
+checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
 dependencies = [
+ "bumpalo",
  "proc-macro2",
  "quote",
  "syn",
- "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.104"
+version = "0.2.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1"
+checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
 dependencies = [
  "unicode-ident",
 ]
@@ -5885,9 +5884,9 @@ dependencies = [
 
 [[package]]
 name = "web-sys"
-version = "0.3.81"
+version = "0.3.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120"
+checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
 dependencies = [
  "js-sys",
  "wasm-bindgen",
@@ -6404,9 +6403,9 @@ checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
 
 [[package]]
 name = "writeable"
-version = "0.6.1"
+version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
 
 [[package]]
 name = "x509-parser"
@@ -6448,11 +6447,10 @@ dependencies = [
 
 [[package]]
 name = "yoke"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
+checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
 dependencies = [
- "serde",
  "stable_deref_trait",
  "yoke-derive",
  "zerofrom",
@@ -6460,9 +6458,9 @@ dependencies = [
 
 [[package]]
 name = "yoke-derive"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6535,9 +6533,9 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
 
 [[package]]
 name = "zerotrie"
-version = "0.2.2"
+version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
+checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
 dependencies = [
  "displaydoc",
  "yoke",
@@ -6546,9 +6544,9 @@ dependencies = [
 
 [[package]]
 name = "zerovec"
-version = "0.11.4"
+version = "0.11.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
 dependencies = [
  "yoke",
  "zerofrom",
@@ -6557,9 +6555,9 @@ dependencies = [
 
 [[package]]
 name = "zerovec-derive"
-version = "0.11.1"
+version = "0.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
+checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
 dependencies = [
  "proc-macro2",
  "quote",

+ 52 - 25
Cargo.toml

@@ -1,3 +1,10 @@
+[workspace.package]
+edition = "2021"
+rust-version = "1.89.0"
+license = "AGPL-3.0-only"
+repository = "https://github.com/dani-garcia/vaultwarden"
+publish = false
+
 [workspace]
 members = ["macros"]
 
@@ -5,15 +12,14 @@ members = ["macros"]
 name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <[email protected]>"]
-edition = "2021"
-rust-version = "1.87.0"
-resolver = "2"
-
-repository = "https://github.com/dani-garcia/vaultwarden"
 readme = "README.md"
-license = "AGPL-3.0-only"
-publish = false
 build = "build.rs"
+resolver = "2"
+repository.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish.workspace = true
 
 [features]
 default = [
@@ -56,9 +62,6 @@ tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and
 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }
 
-# Lazy initialization
-once_cell = "1.21.3"
-
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
@@ -196,7 +199,7 @@ opendal = { version = "0.54.1", features = ["services-fs"], default-features = f
 anyhow = { version = "1.0.100", optional = true }
 aws-config = { version = "1.8.8", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
 aws-credential-types = { version = "1.2.8", optional = true }
-aws-smithy-runtime-api = { version = "1.9.1", optional = true }
+aws-smithy-runtime-api = { version = "1.9.2", optional = true }
 http = { version = "1.3.1", optional = true }
 reqsign = { version = "0.16.5", optional = true }
 
@@ -207,23 +210,13 @@ reqsign = { version = "0.16.5", optional = true }
 strip = "debuginfo"
 lto = "fat"
 codegen-units = 1
-
-# A little bit of a speedup
-[profile.dev]
-split-debuginfo = "unpacked"
-
-# Always build argon2 using opt-level 3
-# This is a huge speed improvement during testing
-[profile.dev.package.argon2]
-opt-level = 3
+debug = false
 
 # Optimize for size
 [profile.release-micro]
 inherits = "release"
-opt-level = "z"
 strip = "symbols"
-lto = "fat"
-codegen-units = 1
+opt-level = "z"
 panic = "abort"
 
 # Profile for systems with low resources
@@ -234,6 +227,32 @@ strip = "symbols"
 lto = "thin"
 codegen-units = 16
 
+# Used for profiling and debugging like valgrind or heaptrack
+# Inherits release to be sure all optimizations have been done
+[profile.dbg]
+inherits = "release"
+strip = "none"
+split-debuginfo = "off"
+debug = "full"
+
+# A little bit of a speedup for generic building
+[profile.dev]
+split-debuginfo = "unpacked"
+debug = "line-tables-only"
+
+# Used for CI builds to improve compile time
+[profile.ci]
+inherits = "dev"
+debug = false
+debug-assertions = false
+strip = "symbols"
+panic = "abort"
+
+# Always build argon2 using opt-level 3
+# This is a huge speed improvement during testing
+[profile.dev.package.argon2]
+opt-level = 3
+
 # Linting config
 # https://doc.rust-lang.org/rustc/lints/groups.html
 [workspace.lints.rust]
@@ -243,15 +262,16 @@ non_ascii_idents = "forbid"
 
 # Deny
 deprecated_in_future = "deny"
+deprecated_safe = { level = "deny", priority = -1 }
 future_incompatible = { level = "deny", priority = -1 }
 keyword_idents = { level = "deny", priority = -1 }
 let_underscore = { level = "deny", priority = -1 }
+nonstandard_style = { level = "deny", priority = -1 }
 noop_method_call = "deny"
 refining_impl_trait = { level = "deny", priority = -1 }
 rust_2018_idioms = { level = "deny", priority = -1 }
 rust_2021_compatibility = { level = "deny", priority = -1 }
 rust_2024_compatibility = { level = "deny", priority = -1 }
-edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
 single_use_lifetimes = "deny"
 trivial_casts = "deny"
 trivial_numeric_casts = "deny"
@@ -261,7 +281,8 @@ unused_lifetimes = "deny"
 unused_qualifications = "deny"
 variant_size_differences = "deny"
 # Allow the following lints since these cause issues with Rust v1.84.0 or newer
-# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues
+# Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
+edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
 if_let_rescope = "allow"
 tail_expr_drop_order = "allow"
 
@@ -275,10 +296,12 @@ todo = "warn"
 result_large_err = "allow"
 
 # Deny
+branches_sharing_code = "deny"
 case_sensitive_file_extension_comparisons = "deny"
 cast_lossless = "deny"
 clone_on_ref_ptr = "deny"
 equatable_if_let = "deny"
+excessive_precision = "deny"
 filter_map_next = "deny"
 float_cmp_const = "deny"
 implicit_clone = "deny"
@@ -292,15 +315,19 @@ manual_instant_elapsed = "deny"
 manual_string_new = "deny"
 match_wildcard_for_single_variants = "deny"
 mem_forget = "deny"
+needless_borrow = "deny"
+needless_collect = "deny"
 needless_continue = "deny"
 needless_lifetimes = "deny"
 option_option = "deny"
+redundant_clone = "deny"
 string_add_assign = "deny"
 unnecessary_join = "deny"
 unnecessary_self_imports = "deny"
 unnested_or_patterns = "deny"
 unused_async = "deny"
 unused_self = "deny"
+useless_let_if_seq = "deny"
 verbose_file_reads = "deny"
 zero_sized_map_values = "deny"
 

+ 5 - 5
docker/DockerSettings.yaml

@@ -1,11 +1,11 @@
 ---
-vault_version: "v2025.9.1"
-vault_image_digest: "sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4"
-# Cross Compile Docker Helper Scripts v1.6.1
+vault_version: "v2025.10.1"
+vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa"
+# Cross Compile Docker Helper Scripts v1.8.0
 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
 # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
-xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
-rust_version: 1.89.0 # Rust version to be used
+xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6"
+rust_version: 1.91.0 # Rust version to be used
 debian_version: trixie # Debian release name to be used
 alpine_version: "3.22" # Alpine version to be used
 # For which platforms/architectures will we try to build images

+ 10 - 10
docker/Dockerfile.alpine

@@ -19,23 +19,23 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull docker.io/vaultwarden/web-vault:v2025.9.1
-#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.9.1
-#     [docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
+#     [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4
-#     [docker.io/vaultwarden/web-vault:v2025.9.1]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
+#     [docker.io/vaultwarden/web-vault:v2025.10.1]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault
 
 ########################## ALPINE BUILD IMAGES ##########################
 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
 ## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6
 
 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006

+ 8 - 8
docker/Dockerfile.debian

@@ -19,24 +19,24 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull docker.io/vaultwarden/web-vault:v2025.9.1
-#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.9.1
-#     [docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
+#     [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4
-#     [docker.io/vaultwarden/web-vault:v2025.9.1]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
+#     [docker.io/vaultwarden/web-vault:v2025.10.1]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault
 
 ########################## Cross Compile Docker Helper Scripts ##########################
 ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
 ## And these bash scripts do not have any significant difference if at all
-FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx
+FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6 AS xx
 
 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.0-slim-trixie AS build
 COPY --from=xx / /
 ARG TARGETARCH
 ARG TARGETVARIANT

+ 2 - 2
docker/README.md

@@ -116,7 +116,7 @@ docker/bake.sh
 ```
 
 You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
-This will also append those values to the tag so you can see the builded container when running `docker images`.
+This will also append those values to the tag so you can see the built container when running `docker images`.
 
 You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use.
 ```bash
@@ -162,7 +162,7 @@ You can append extra arguments after the target if you want. This can be useful
 
 For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br>
 
-### Testing podman builded images
+### Testing podman built images
 
 The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.
 

+ 5 - 1
macros/Cargo.toml

@@ -1,7 +1,11 @@
 [package]
 name = "macros"
 version = "0.1.0"
-edition = "2021"
+repository.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish.workspace = true
 
 [lib]
 name = "macros"

+ 1 - 1
rust-toolchain.toml

@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.89.0"
+channel = "1.91.0"
 components = [ "rustfmt", "clippy" ]
 profile = "minimal"

+ 22 - 23
src/api/admin.rs

@@ -1,17 +1,16 @@
-use once_cell::sync::Lazy;
-use reqwest::Method;
-use serde::de::DeserializeOwned;
-use serde_json::Value;
-use std::env;
+use std::{env, sync::LazyLock};
 
-use rocket::serde::json::Json;
+use reqwest::Method;
 use rocket::{
     form::Form,
     http::{Cookie, CookieJar, MediaType, SameSite, Status},
     request::{FromRequest, Outcome, Request},
     response::{content::RawHtml as Html, Redirect},
+    serde::json::Json,
     Catcher, Route,
 };
+use serde::de::DeserializeOwned;
+use serde_json::Value;
 
 use crate::{
     api::{
@@ -82,7 +81,7 @@ pub fn catchers() -> Vec<Catcher> {
     }
 }
 
-static DB_TYPE: Lazy<&str> = Lazy::new(|| match ACTIVE_DB_TYPE.get() {
+static DB_TYPE: LazyLock<&str> = LazyLock::new(|| match ACTIVE_DB_TYPE.get() {
     #[cfg(mysql)]
     Some(DbConnType::Mysql) => "MySQL",
     #[cfg(postgresql)]
@@ -93,9 +92,10 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| match ACTIVE_DB_TYPE.get() {
 });
 
 #[cfg(sqlite)]
-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
+static CAN_BACKUP: LazyLock<bool> =
+    LazyLock::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
 #[cfg(not(sqlite))]
-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| false);
+static CAN_BACKUP: LazyLock<bool> = LazyLock::new(|| false);
 
 #[get("/")]
 fn admin_disabled() -> &'static str {
@@ -157,10 +157,10 @@ fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
         err_code!("Authorization failed.", Status::Unauthorized.code);
     }
     let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
-    render_admin_login(None, Some(redirect))
+    render_admin_login(None, Some(&redirect))
 }
 
-fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> {
+fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiResult<Html<String>> {
     // If there is an error, show it
     let msg = msg.map(|msg| format!("Error: {msg}"));
     let json = json!({
@@ -194,14 +194,17 @@ fn post_admin_login(
     if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
         return Err(AdminResponse::TooManyRequests(render_admin_login(
             Some("Too many requests, try again later."),
-            redirect,
+            redirect.as_deref(),
         )));
     }
 
     // If the token is invalid, redirect to login page
     if !_validate_token(&data.token) {
         error!("Invalid admin token. IP: {}", ip.ip);
-        Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect)))
+        Err(AdminResponse::Unauthorized(render_admin_login(
+            Some("Invalid admin token, please try again."),
+            redirect.as_deref(),
+        )))
     } else {
         // If the token received is valid, generate JWT and save it as a cookie
         let claims = generate_admin_claims();
@@ -308,7 +311,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -
         err_code!("User already exists", Status::Conflict.code)
     }
 
-    let mut user = User::new(data.email, None);
+    let mut user = User::new(&data.email, None);
 
     async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
         if CONFIG.mail_enabled() {
@@ -825,11 +828,7 @@ impl<'r> FromRequest<'r> for AdminToken {
             _ => err_handler!("Error getting Client IP"),
         };
 
-        if CONFIG.disable_admin_token() {
-            Outcome::Success(Self {
-                ip,
-            })
-        } else {
+        if !CONFIG.disable_admin_token() {
             let cookies = request.cookies();
 
             let access_token = match cookies.get(COOKIE_NAME) {
@@ -853,10 +852,10 @@ impl<'r> FromRequest<'r> for AdminToken {
                 error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
                 return Outcome::Error((Status::Unauthorized, "Session expired"));
             }
-
-            Outcome::Success(Self {
-                ip,
-            })
         }
+
+        Outcome::Success(Self {
+            ip,
+        })
     }
 }

+ 10 - 9
src/api/core/accounts.rs

@@ -285,7 +285,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
                 || CONFIG.is_signup_allowed(&email)
                 || pending_emergency_access.is_some()
             {
-                User::new(email.clone(), None)
+                User::new(&email, None)
             } else {
                 err!("Registration not allowed or user already exists")
             }
@@ -295,7 +295,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
     // Make sure we don't leave a lingering invitation.
     Invitation::take(&email, &conn).await;
 
-    set_kdf_data(&mut user, data.kdf)?;
+    set_kdf_data(&mut user, &data.kdf)?;
 
     user.set_password(&data.master_password_hash, Some(data.key), true, None);
     user.password_hint = password_hint;
@@ -358,7 +358,7 @@ async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn:
     let password_hint = clean_password_hint(&data.master_password_hint);
     enforce_password_hint_setting(&password_hint)?;
 
-    set_kdf_data(&mut user, data.kdf)?;
+    set_kdf_data(&mut user, &data.kdf)?;
 
     user.set_password(
         &data.master_password_hash,
@@ -556,7 +556,7 @@ struct ChangeKdfData {
     key: String,
 }
 
-fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult {
+fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
     if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
         err!("PBKDF2 KDF iterations must be at least 100000.")
     }
@@ -600,7 +600,7 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbConn, nt:
         err!("Invalid password")
     }
 
-    set_kdf_data(&mut user, data.kdf)?;
+    set_kdf_data(&mut user, &data.kdf)?;
 
     user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
     let save_result = user.save(&conn).await;
@@ -1279,10 +1279,11 @@ async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: D
 
 #[get("/devices/knowndevice")]
 async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResult {
-    let mut result = false;
-    if let Some(user) = User::find_by_mail(&device.email, &conn).await {
-        result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some();
-    }
+    let result = if let Some(user) = User::find_by_mail(&device.email, &conn).await {
+        Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some()
+    } else {
+        false
+    };
     Ok(Json(json!(result)))
 }
 

+ 1 - 1
src/api/core/ciphers.rs

@@ -1269,7 +1269,7 @@ async fn save_attachment(
         attachment.save(&conn).await.expect("Error saving attachment");
     }
 
-    save_temp_file(PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;
+    save_temp_file(&PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;
 
     nt.send_cipher_update(
         UpdateType::SyncCipherUpdate,

+ 1 - 1
src/api/core/emergency_access.rs

@@ -245,7 +245,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
                 invitation.save(&conn).await?;
             }
 
-            let mut user = User::new(email.clone(), None);
+            let mut user = User::new(&email, None);
             user.save(&conn).await?;
             (user, true)
         }

+ 3 - 3
src/api/core/organizations.rs

@@ -202,7 +202,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, conn: DbConn
         (None, None)
     };
 
-    let org = Organization::new(data.name, data.billing_email, private_key, public_key);
+    let org = Organization::new(data.name, &data.billing_email, private_key, public_key);
     let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None);
     let collection = Collection::new(org.uuid.clone(), data.collection_name, None);
 
@@ -1124,7 +1124,7 @@ async fn send_invite(
                     Invitation::new(email).save(&conn).await?;
                 }
 
-                let mut new_user = User::new(email.clone(), None);
+                let mut new_user = User::new(email, None);
                 new_user.save(&conn).await?;
                 user_created = true;
                 new_user
@@ -1591,7 +1591,7 @@ async fn edit_member(
     // HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission
     // The from_str() will convert the custom role type into a manager role type
     let raw_type = &data.r#type.into_string();
-    // MembershipTyp::from_str will convert custom (4) to manager (3)
+    // MembershipType::from_str will convert custom (4) to manager (3)
     let Some(new_type) = MembershipType::from_str(raw_type) else {
         err!("Invalid type")
     };

+ 1 - 1
src/api/core/public.rs

@@ -94,7 +94,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
                 Some(user) => user, // exists in vaultwarden
                 None => {
                     // User does not exist yet
-                    let mut new_user = User::new(user_data.email.clone(), None);
+                    let mut new_user = User::new(&user_data.email, None);
                     new_user.save(&conn).await?;
 
                     if !CONFIG.mail_enabled() {

+ 10 - 11
src/api/core/sends.rs

@@ -1,13 +1,12 @@
-use std::path::Path;
-use std::time::Duration;
+use std::{path::Path, sync::LazyLock, time::Duration};
 
 use chrono::{DateTime, TimeDelta, Utc};
 use num_traits::ToPrimitive;
-use once_cell::sync::Lazy;
-use rocket::form::Form;
-use rocket::fs::NamedFile;
-use rocket::fs::TempFile;
-use rocket::serde::json::Json;
+use rocket::{
+    form::Form,
+    fs::{NamedFile, TempFile},
+    serde::json::Json,
+};
 use serde_json::Value;
 
 use crate::{
@@ -23,7 +22,7 @@ use crate::{
 };
 
 const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
-static ANON_PUSH_DEVICE: Lazy<Device> = Lazy::new(|| {
+static ANON_PUSH_DEVICE: LazyLock<Device> = LazyLock::new(|| {
     let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z");
     Device {
         uuid: String::from("00000000-0000-0000-0000-000000000000").into(),
@@ -274,7 +273,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbCo
 
     let file_id = crate::crypto::generate_send_file_id();
 
-    save_temp_file(PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;
+    save_temp_file(&PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;
 
     let mut data_value: Value = serde_json::from_str(&send.data)?;
     if let Some(o) = data_value.as_object_mut() {
@@ -426,7 +425,7 @@ async fn post_send_file_v2_data(
 
     let file_path = format!("{send_id}/{file_id}");
 
-    save_temp_file(PathType::Sends, &file_path, data.data, false).await?;
+    save_temp_file(&PathType::Sends, &file_path, data.data, false).await?;
 
     nt.send_send_update(
         UpdateType::SyncSendCreate,
@@ -567,7 +566,7 @@ async fn post_access_file(
 }
 
 async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> {
-    let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?;
+    let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
 
     if operator.info().scheme() == opendal::Scheme::Fs {
         let token_claims = crate::auth::generate_send_claims(send_id, file_id);

+ 1 - 1
src/api/core/two_factor/authenticator.rs

@@ -31,7 +31,7 @@ async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers,
 
     let (enabled, key) = match twofactor {
         Some(tf) => (true, tf.data),
-        _ => (false, crypto::encode_random_bytes::<20>(BASE32)),
+        _ => (false, crypto::encode_random_bytes::<20>(&BASE32)),
     };
 
     // Upstream seems to also return `userVerificationToken`, but doesn't seem to be used at all.

+ 1 - 1
src/api/core/two_factor/mod.rs

@@ -126,7 +126,7 @@ async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, co
 
 async fn _generate_recover_code(user: &mut User, conn: &DbConn) {
     if user.totp_recover.is_none() {
-        let totp_recover = crypto::encode_random_bytes::<20>(BASE32);
+        let totp_recover = crypto::encode_random_bytes::<20>(&BASE32);
         user.totp_recover = Some(totp_recover);
         user.save(conn).await.ok();
     }

+ 10 - 12
src/api/icons.rs

@@ -1,13 +1,13 @@
 use std::{
     collections::HashMap,
     net::IpAddr,
-    sync::Arc,
+    sync::{Arc, LazyLock},
     time::{Duration, SystemTime},
 };
 
 use bytes::{Bytes, BytesMut};
 use futures::{stream::StreamExt, TryFutureExt};
-use once_cell::sync::Lazy;
+use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
 use regex::Regex;
 use reqwest::{
     header::{self, HeaderMap, HeaderValue},
@@ -16,8 +16,6 @@ use reqwest::{
 use rocket::{http::ContentType, response::Redirect, Route};
 use svg_hush::{data_url_filter, Filter};
 
-use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
-
 use crate::{
     config::PathType,
     error::Error,
@@ -33,7 +31,7 @@ pub fn routes() -> Vec<Route> {
     }
 }
 
-static CLIENT: Lazy<Client> = Lazy::new(|| {
+static CLIENT: LazyLock<Client> = LazyLock::new(|| {
     // Generate the default headers
     let mut default_headers = HeaderMap::new();
     default_headers.insert(
@@ -78,7 +76,7 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
 });
 
 // Build Regex only once since this takes a lot of time.
-static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
+static ICON_SIZE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
 
 // The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
 // It is used to prevent sending a specific header which breaks icon downloads.
@@ -220,7 +218,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
     }
 
     // Try to read the cached icon, and return it if it exists
-    if let Ok(operator) = CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+    if let Ok(operator) = CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
         if let Ok(buf) = operator.read(path).await {
             return Some(buf.to_vec());
         }
@@ -230,7 +228,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
 }
 
 async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> {
-    let operator = CONFIG.opendal_operator_for_path_type(PathType::IconCache)?;
+    let operator = CONFIG.opendal_operator_for_path_type(&PathType::IconCache)?;
     let meta = operator.stat(path).await?;
     let modified =
         meta.last_modified().ok_or_else(|| std::io::Error::other(format!("No last modified time for `{path}`")))?;
@@ -246,7 +244,7 @@ async fn icon_is_negcached(path: &str) -> bool {
     match expired {
         // No longer negatively cached, drop the marker
         Ok(true) => {
-            match CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+            match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
                 Ok(operator) => {
                     if let Err(e) = operator.delete(&miss_indicator).await {
                         error!("Could not remove negative cache indicator for icon {path:?}: {e:?}");
@@ -462,8 +460,8 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err
 /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "");
 /// ```
 fn get_icon_priority(href: &str, sizes: &str) -> u8 {
-    static PRIORITY_MAP: Lazy<HashMap<&'static str, u8>> =
-        Lazy::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());
+    static PRIORITY_MAP: LazyLock<HashMap<&'static str, u8>> =
+        LazyLock::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());
 
     // Check if there is a dimension set
     let (width, height) = parse_sizes(sizes);
@@ -597,7 +595,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
 }
 
 async fn save_icon(path: &str, icon: Vec<u8>) {
-    let operator = match CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+    let operator = match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
         Ok(operator) => operator,
         Err(e) => {
             warn!("Failed to get OpenDAL operator while saving icon: {e}");

+ 2 - 2
src/api/identity.rs

@@ -248,7 +248,7 @@ async fn _sso_login(
                 _ => (),
             }
 
-            let mut user = User::new(user_infos.email, user_infos.user_name);
+            let mut user = User::new(&user_infos.email, user_infos.user_name);
             user.verified_at = Some(now);
             user.save(conn).await?;
 
@@ -1061,7 +1061,7 @@ async fn oidcsignin_redirect(
     wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper,
     conn: &DbConn,
 ) -> ApiResult<Redirect> {
-    let state = sso::decode_state(base64_state)?;
+    let state = sso::decode_state(&base64_state)?;
     let code = sso::encode_code_claims(wrapper(state.clone()));
 
     let nonce = match SsoNonce::find(&state, conn).await {

+ 23 - 21
src/api/notifications.rs

@@ -1,11 +1,14 @@
-use std::{net::IpAddr, sync::Arc, time::Duration};
+use std::{
+    net::IpAddr,
+    sync::{Arc, LazyLock},
+    time::Duration,
+};
 
 use chrono::{NaiveDateTime, Utc};
 use rmpv::Value;
 use rocket::{futures::StreamExt, Route};
-use tokio::sync::mpsc::Sender;
-
 use rocket_ws::{Message, WebSocket};
+use tokio::sync::mpsc::Sender;
 
 use crate::{
     auth::{ClientIp, WsAccessTokenHeader},
@@ -16,15 +19,13 @@ use crate::{
     Error, CONFIG,
 };
 
-use once_cell::sync::Lazy;
-
-pub static WS_USERS: Lazy<Arc<WebSocketUsers>> = Lazy::new(|| {
+pub static WS_USERS: LazyLock<Arc<WebSocketUsers>> = LazyLock::new(|| {
     Arc::new(WebSocketUsers {
         map: Arc::new(dashmap::DashMap::new()),
     })
 });
 
-pub static WS_ANONYMOUS_SUBSCRIPTIONS: Lazy<Arc<AnonymousWebSocketSubscriptions>> = Lazy::new(|| {
+pub static WS_ANONYMOUS_SUBSCRIPTIONS: LazyLock<Arc<AnonymousWebSocketSubscriptions>> = LazyLock::new(|| {
     Arc::new(AnonymousWebSocketSubscriptions {
         map: Arc::new(dashmap::DashMap::new()),
     })
@@ -35,7 +36,7 @@ use super::{
     push_send_update, push_user_update,
 };
 
-static NOTIFICATIONS_DISABLED: Lazy<bool> = Lazy::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled());
+static NOTIFICATIONS_DISABLED: LazyLock<bool> = LazyLock::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled());
 
 pub fn routes() -> Vec<Route> {
     if CONFIG.enable_websocket() {
@@ -109,8 +110,7 @@ fn websockets_hub<'r>(
     ip: ClientIp,
     header_token: WsAccessTokenHeader,
 ) -> Result<rocket_ws::Stream!['r], Error> {
-    let addr = ip.ip;
-    info!("Accepting Rocket WS connection from {addr}");
+    info!("Accepting Rocket WS connection from {}", ip.ip);
 
     let token = if let Some(token) = data.access_token {
         token
@@ -133,7 +133,7 @@ fn websockets_hub<'r>(
         users.map.entry(claims.sub.to_string()).or_default().push((entry_uuid, tx));
 
         // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
-        (rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, addr))
+        (rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, ip.ip))
     };
 
     Ok({
@@ -189,8 +189,7 @@ fn websockets_hub<'r>(
 #[allow(tail_expr_drop_order)]
 #[get("/anonymous-hub?<token..>")]
 fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> Result<rocket_ws::Stream!['r], Error> {
-    let addr = ip.ip;
-    info!("Accepting Anonymous Rocket WS connection from {addr}");
+    info!("Accepting Anonymous Rocket WS connection from {}", ip.ip);
 
     let (mut rx, guard) = {
         let subscriptions = Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS);
@@ -200,7 +199,7 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
         subscriptions.map.insert(token.clone(), tx);
 
         // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
-        (rx, WSAnonymousEntryMapGuard::new(subscriptions, token, addr))
+        (rx, WSAnonymousEntryMapGuard::new(subscriptions, token, ip.ip))
     };
 
     Ok({
@@ -257,11 +256,11 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
 // Websockets server
 //
 
-fn serialize(val: Value) -> Vec<u8> {
+fn serialize(val: &Value) -> Vec<u8> {
     use rmpv::encode::write_value;
 
     let mut buf = Vec::new();
-    write_value(&mut buf, &val).expect("Error encoding MsgPack");
+    write_value(&mut buf, val).expect("Error encoding MsgPack");
 
     // Add size bytes at the start
     // Extracted from BinaryMessageFormat.js
@@ -552,7 +551,7 @@ impl AnonymousWebSocketSubscriptions {
         let data = create_anonymous_update(
             vec![("Id".into(), auth_request_id.to_string().into()), ("UserId".into(), user_id.to_string().into())],
             UpdateType::AuthRequestResponse,
-            user_id.clone(),
+            user_id,
         );
         self.send_update(auth_request_id, &data).await;
     }
@@ -588,16 +587,19 @@ fn create_update(payload: Vec<(Value, Value)>, ut: UpdateType, acting_device_id:
         ])]),
     ]);
 
-    serialize(value)
+    serialize(&value)
 }
 
-fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: UserId) -> Vec<u8> {
+fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: &UserId) -> Vec<u8> {
     use rmpv::Value as V;
 
     let value = V::Array(vec![
         1.into(),
         V::Map(vec![]),
         V::Nil,
+        // This word is misspelled, but upstream has this too
+        // https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
+        // https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
         "AuthRequestResponseRecieved".into(),
         V::Array(vec![V::Map(vec![
             ("Type".into(), (ut as i32).into()),
@@ -606,11 +608,11 @@ fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id
         ])]),
     ]);
 
-    serialize(value)
+    serialize(&value)
 }
 
 fn create_ping() -> Vec<u8> {
-    serialize(Value::Array(vec![6.into()]))
+    serialize(&Value::Array(vec![6.into()]))
 }
 
 // https://github.com/bitwarden/server/blob/375af7c43b10d9da03525d41452f95de3f921541/src/Core/Enums/PushType.cs
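
Most of the signature changes in this file follow one rule: a function that only reads a value now takes a reference, so call sites can drop their clone() calls. A small sketch of that pattern, with a hypothetical Id newtype standing in for UserId:

#[derive(Debug)]
struct Id(String); // hypothetical stand-in for the UserId newtype

// Borrows the id because it only formats it; the caller keeps ownership.
fn describe(id: &Id) -> String {
    format!("user {}", id.0)
}

fn main() {
    let id = Id(String::from("00000000-0000-0000-0000-000000000000"));
    // No `id.clone()` needed at either call site.
    let first = describe(&id);
    let second = describe(&id);
    assert_eq!(first, second);
}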

+ 6 - 4
src/api/push.rs

@@ -1,3 +1,8 @@
+use std::{
+    sync::LazyLock,
+    time::{Duration, Instant},
+};
+
 use reqwest::{
     header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE},
     Method,
@@ -16,9 +21,6 @@ use crate::{
     CONFIG,
 };
 
-use once_cell::sync::Lazy;
-use std::time::{Duration, Instant};
-
 #[derive(Deserialize)]
 struct AuthPushToken {
     access_token: String,
@@ -32,7 +34,7 @@ struct LocalAuthPushToken {
 }
 
 async fn get_auth_api_token() -> ApiResult<String> {
-    static API_TOKEN: Lazy<RwLock<LocalAuthPushToken>> = Lazy::new(|| {
+    static API_TOKEN: LazyLock<RwLock<LocalAuthPushToken>> = LazyLock::new(|| {
         RwLock::new(LocalAuthPushToken {
             access_token: String::new(),
             valid_until: Instant::now(),

+ 34 - 28
src/auth.rs

@@ -1,12 +1,15 @@
-// JWT Handling
+use std::{
+    env,
+    net::IpAddr,
+    sync::{LazyLock, OnceLock},
+};
+
 use chrono::{DateTime, TimeDelta, Utc};
 use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header};
 use num_traits::FromPrimitive;
-use once_cell::sync::{Lazy, OnceCell};
 use openssl::rsa::Rsa;
 use serde::de::DeserializeOwned;
 use serde::ser::Serialize;
-use std::{env, net::IpAddr};
 
 use crate::{
     api::ApiResult,
@@ -22,27 +25,30 @@ use crate::{
 const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
 
 // Limit when BitWarden consider the token as expired
-pub static BW_EXPIRATION: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_minutes(5).unwrap());
-
-pub static DEFAULT_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(30).unwrap());
-pub static MOBILE_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(90).unwrap());
-pub static DEFAULT_ACCESS_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_hours(2).unwrap());
-static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
-
-pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
-static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
-static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy<String> =
-    Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin()));
-static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
-static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
-static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
-static JWT_SEND_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|send", CONFIG.domain_origin()));
-static JWT_ORG_API_KEY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|api.organization", CONFIG.domain_origin()));
-static JWT_FILE_DOWNLOAD_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|file_download", CONFIG.domain_origin()));
-static JWT_REGISTER_VERIFY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|register_verify", CONFIG.domain_origin()));
-
-static PRIVATE_RSA_KEY: OnceCell<EncodingKey> = OnceCell::new();
-static PUBLIC_RSA_KEY: OnceCell<DecodingKey> = OnceCell::new();
+pub static BW_EXPIRATION: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_minutes(5).unwrap());
+
+pub static DEFAULT_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(30).unwrap());
+pub static MOBILE_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(90).unwrap());
+pub static DEFAULT_ACCESS_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_hours(2).unwrap());
+static JWT_HEADER: LazyLock<Header> = LazyLock::new(|| Header::new(JWT_ALGORITHM));
+
+pub static JWT_LOGIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|login", CONFIG.domain_origin()));
+static JWT_INVITE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|invite", CONFIG.domain_origin()));
+static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: LazyLock<String> =
+    LazyLock::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin()));
+static JWT_DELETE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|delete", CONFIG.domain_origin()));
+static JWT_VERIFYEMAIL_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
+static JWT_ADMIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|admin", CONFIG.domain_origin()));
+static JWT_SEND_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|send", CONFIG.domain_origin()));
+static JWT_ORG_API_KEY_ISSUER: LazyLock<String> =
+    LazyLock::new(|| format!("{}|api.organization", CONFIG.domain_origin()));
+static JWT_FILE_DOWNLOAD_ISSUER: LazyLock<String> =
+    LazyLock::new(|| format!("{}|file_download", CONFIG.domain_origin()));
+static JWT_REGISTER_VERIFY_ISSUER: LazyLock<String> =
+    LazyLock::new(|| format!("{}|register_verify", CONFIG.domain_origin()));
+
+static PRIVATE_RSA_KEY: OnceLock<EncodingKey> = OnceLock::new();
+static PUBLIC_RSA_KEY: OnceLock<DecodingKey> = OnceLock::new();
 
 pub async fn initialize_keys() -> Result<(), Error> {
     use std::io::Error;
@@ -54,7 +60,7 @@ pub async fn initialize_keys() -> Result<(), Error> {
         .ok_or_else(|| Error::other("Private RSA key path filename is not valid UTF-8"))?
         .to_string();
 
-    let operator = CONFIG.opendal_operator_for_path_type(PathType::RsaKey).map_err(Error::other)?;
+    let operator = CONFIG.opendal_operator_for_path_type(&PathType::RsaKey).map_err(Error::other)?;
 
     let priv_key_buffer = match operator.read(&rsa_key_filename).await {
         Ok(buffer) => Some(buffer),
@@ -457,7 +463,7 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims {
     }
 }
 
-pub fn generate_verify_email_claims(user_id: UserId) -> BasicJwtClaims {
+pub fn generate_verify_email_claims(user_id: &UserId) -> BasicJwtClaims {
     let time_now = Utc::now();
     let expire_hours = i64::from(CONFIG.invitation_expiration_hours());
     BasicJwtClaims {
@@ -696,9 +702,9 @@ impl<'r> FromRequest<'r> for OrgHeaders {
         // First check the path, if this is not a valid uuid, try the query values.
         let url_org_id: Option<OrganizationId> = {
             if let Some(Ok(org_id)) = request.param::<OrganizationId>(1) {
-                Some(org_id.clone())
+                Some(org_id)
             } else if let Some(Ok(org_id)) = request.query_value::<OrganizationId>("organizationId") {
-                Some(org_id.clone())
+                Some(org_id)
             } else {
                 None
             }
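
OnceCell from once_cell maps onto std::sync::OnceLock the same way Lazy maps onto LazyLock: set() once at startup, get() afterwards. A simplified sketch of the key-initialization pattern, with plain bytes standing in for the real RSA key types:

use std::sync::OnceLock;

static PRIVATE_KEY: OnceLock<Vec<u8>> = OnceLock::new();

fn initialize_keys(pem: Vec<u8>) -> Result<(), &'static str> {
    // `set` succeeds only for the first caller; later calls get the value back as an error.
    PRIVATE_KEY.set(pem).map_err(|_| "keys already initialized")
}

fn private_key() -> &'static [u8] {
    PRIVATE_KEY.get().expect("initialize_keys must be called at startup")
}

fn main() {
    initialize_keys(b"-----BEGIN RSA PRIVATE KEY-----".to_vec()).unwrap();
    assert!(!private_key().is_empty());
}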

+ 211 - 107
src/config.rs

@@ -1,5 +1,6 @@
 use std::{
     env::consts::EXE_SUFFIX,
+    fmt,
     process::exit,
     sync::{
         atomic::{AtomicBool, Ordering},
@@ -8,15 +9,15 @@ use std::{
 };
 
 use job_scheduler_ng::Schedule;
-use once_cell::sync::Lazy;
 use reqwest::Url;
+use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
 
 use crate::{
     error::Error,
     util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags},
 };
 
-static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
+static CONFIG_FILE: LazyLock<String> = LazyLock::new(|| {
     let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
     get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
 });
@@ -33,7 +34,7 @@ static CONFIG_FILENAME: LazyLock<String> = LazyLock::new(|| {
 
 pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
 
-pub static CONFIG: Lazy<Config> = Lazy::new(|| {
+pub static CONFIG: LazyLock<Config> = LazyLock::new(|| {
     std::thread::spawn(|| {
         let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap_or_else(|e| {
             println!("Error loading config:\n  {e:?}\n");
@@ -55,6 +56,41 @@ pub static CONFIG: Lazy<Config> = Lazy::new(|| {
 pub type Pass = String;
 
 macro_rules! make_config {
+    // Support string print
+    ( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value(&$value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
+    ( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
+    ( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value(&$value).unwrap() }; // Optional other or string, we convert to json
+    ( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { $value.as_str().into() }; // Required string value, we convert to json
+    ( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to json
+
+    // Group or empty string
+    ( @show ) => { "" };
+    ( @show $lit:literal ) => { $lit };
+
+    // Wrap the optionals in an Option type
+    ( @type $ty:ty, option) => { Option<$ty> };
+    ( @type $ty:ty, $id:ident) => { $ty };
+
+    // Generate the values depending on none_action
+    ( @build $value:expr, $config:expr, option, ) => { $value };
+    ( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
+    ( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
+        match $value {
+            Some(v) => v,
+            None => {
+                let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
+                f($config)
+            }
+        }
+    }};
+    ( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
+        let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
+        f($config)
+    }};
+
+    ( @getenv $name:expr, bool ) => { get_env_bool($name) };
+    ( @getenv $name:expr, $ty:ident ) => { get_env($name) };
+
     ($(
         $(#[doc = $groupdoc:literal])?
         $group:ident $(: $group_enabled:ident)? {
@@ -74,10 +110,103 @@ macro_rules! make_config {
             _env: ConfigBuilder,
             _usr: ConfigBuilder,
 
-            _overrides: Vec<String>,
+            _overrides: Vec<&'static str>,
+        }
+
+        // Custom Deserialize for ConfigBuilder, mainly based upon https://serde.rs/deserialize-struct.html
+        // This deserialize doesn't care if there are keys missing, or if there are duplicate keys
+        // In case of duplicate keys (which should never be possible unless manually edited), the last value is used!
+        // Main reason for this is removing the `visit_seq` function, which causes a lot of code generation not needed or used for this struct.
+        impl<'de> Deserialize<'de> for ConfigBuilder {
+            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+            where
+                D: Deserializer<'de>,
+            {
+                const FIELDS: &[&str] = &[
+                $($(
+                    stringify!($name),
+                )+)+
+                ];
+
+                #[allow(non_camel_case_types)]
+                enum Field {
+                $($(
+                    $name,
+                )+)+
+                    __ignore,
+                }
+
+                impl<'de> Deserialize<'de> for Field {
+                    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+                    where
+                        D: Deserializer<'de>,
+                    {
+                        struct FieldVisitor;
+
+                        impl Visitor<'_> for FieldVisitor {
+                            type Value = Field;
+
+                            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                                formatter.write_str("ConfigBuilder field identifier")
+                            }
+
+                            #[inline]
+                            fn visit_str<E>(self, value: &str) -> Result<Field, E>
+                            where
+                                E: de::Error,
+                            {
+                                match value {
+                                $($(
+                                    stringify!($name) => Ok(Field::$name),
+                                )+)+
+                                    _ => Ok(Field::__ignore),
+                                }
+                            }
+                        }
+
+                        deserializer.deserialize_identifier(FieldVisitor)
+                    }
+                }
+
+                struct ConfigBuilderVisitor;
+
+                impl<'de> Visitor<'de> for ConfigBuilderVisitor {
+                    type Value = ConfigBuilder;
+
+                    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                        formatter.write_str("struct ConfigBuilder")
+                    }
+
+                    #[inline]
+                    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
+                    where
+                        A: MapAccess<'de>,
+                    {
+                        let mut builder = ConfigBuilder::default();
+                        while let Some(key) = map.next_key()? {
+                            match key {
+                            $($(
+                                Field::$name => {
+                                    if builder.$name.is_some() {
+                                        return Err(de::Error::duplicate_field(stringify!($name)));
+                                    }
+                                    builder.$name = map.next_value()?;
+                                }
+                            )+)+
+                                Field::__ignore => {
+                                    let _ = map.next_value::<de::IgnoredAny>()?;
+                                }
+                            }
+                        }
+                        Ok(builder)
+                    }
+                }
+
+                deserializer.deserialize_struct("ConfigBuilder", FIELDS, ConfigBuilderVisitor)
+            }
         }
 
-        #[derive(Clone, Default, Deserialize, Serialize)]
+        #[derive(Clone, Default, Serialize)]
         pub struct ConfigBuilder {
             $($(
                 #[serde(skip_serializing_if = "Option::is_none")]
@@ -86,7 +215,6 @@ macro_rules! make_config {
         }
 
         impl ConfigBuilder {
-            #[allow(clippy::field_reassign_with_default)]
             fn from_env() -> Self {
                 let env_file = get_env("ENV_FILE").unwrap_or_else(|| String::from(".env"));
                 match dotenvy::from_path(&env_file) {
@@ -148,14 +276,14 @@ macro_rules! make_config {
 
             /// Merges the values of both builders into a new builder.
             /// If both have the same element, `other` wins.
-            fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<String>) -> Self {
+            fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<&str>) -> Self {
                 let mut builder = self.clone();
                 $($(
                     if let v @Some(_) = &other.$name {
                         builder.$name = v.clone();
 
                         if self.$name.is_some() {
-                            overrides.push(pastey::paste!(stringify!([<$name:upper>])).into());
+                            overrides.push(pastey::paste!(stringify!([<$name:upper>])));
                         }
                     }
                 )+)+
@@ -196,6 +324,32 @@ macro_rules! make_config {
         #[derive(Clone, Default)]
         struct ConfigItems { $($( $name: make_config! {@type $ty, $none_action}, )+)+ }
 
+        #[derive(Serialize)]
+        struct ElementDoc {
+            name: &'static str,
+            description: &'static str,
+        }
+
+        #[derive(Serialize)]
+        struct ElementData {
+            editable: bool,
+            name: &'static str,
+            value: serde_json::Value,
+            default: serde_json::Value,
+            #[serde(rename = "type")]
+            r#type: &'static str,
+            doc: ElementDoc,
+            overridden: bool,
+        }
+
+        #[derive(Serialize)]
+        pub struct GroupData {
+            group: &'static str,
+            grouptoggle: &'static str,
+            groupdoc: &'static str,
+            elements: Vec<ElementData>,
+        }
+
         #[allow(unused)]
         impl Config {
             $($(
@@ -207,11 +361,12 @@ macro_rules! make_config {
 
             pub fn prepare_json(&self) -> serde_json::Value {
                 let (def, cfg, overridden) = {
+                    // Hold the inner lock for as short a time as possible and clone what is needed to prevent deadlocks
                     let inner = &self.inner.read().unwrap();
                     (inner._env.build(), inner.config.clone(), inner._overrides.clone())
                 };
 
-                fn _get_form_type(rust_type: &str) -> &'static str {
+                fn _get_form_type(rust_type: &'static str) -> &'static str {
                     match rust_type {
                         "Pass" => "password",
                         "String" => "text",
@@ -220,48 +375,36 @@ macro_rules! make_config {
                     }
                 }
 
-                fn _get_doc(doc: &str) -> serde_json::Value {
-                    let mut split = doc.split("|>").map(str::trim);
-
-                    // We do not use the json!() macro here since that causes a lot of macro recursion.
-                    // This slows down compile time and it also causes issues with rust-analyzer
-                    serde_json::Value::Object({
-                        let mut doc_json = serde_json::Map::new();
-                        doc_json.insert("name".into(), serde_json::to_value(split.next()).unwrap());
-                        doc_json.insert("description".into(), serde_json::to_value(split.next()).unwrap());
-                        doc_json
-                    })
+                fn _get_doc(doc_str: &'static str) -> ElementDoc {
+                    let mut split = doc_str.split("|>").map(str::trim);
+                    ElementDoc {
+                        name: split.next().unwrap_or_default(),
+                        description: split.next().unwrap_or_default(),
+                    }
                 }
 
-                // We do not use the json!() macro here since that causes a lot of macro recursion.
-                // This slows down compile time and it also causes issues with rust-analyzer
-                serde_json::Value::Array(<[_]>::into_vec(Box::new([
-                $(
-                    serde_json::Value::Object({
-                        let mut group = serde_json::Map::new();
-                        group.insert("group".into(), (stringify!($group)).into());
-                        group.insert("grouptoggle".into(), (stringify!($($group_enabled)?)).into());
-                        group.insert("groupdoc".into(), (make_config! { @show $($groupdoc)? }).into());
-
-                        group.insert("elements".into(), serde_json::Value::Array(<[_]>::into_vec(Box::new([
-                        $(
-                            serde_json::Value::Object({
-                                let mut element = serde_json::Map::new();
-                                element.insert("editable".into(), ($editable).into());
-                                element.insert("name".into(), (stringify!($name)).into());
-                                element.insert("value".into(), serde_json::to_value(cfg.$name).unwrap());
-                                element.insert("default".into(), serde_json::to_value(def.$name).unwrap());
-                                element.insert("type".into(), (_get_form_type(stringify!($ty))).into());
-                                element.insert("doc".into(), (_get_doc(concat!($($doc),+))).into());
-                                element.insert("overridden".into(), (overridden.contains(&pastey::paste!(stringify!([<$name:upper>])).into())).into());
-                                element
-                            }),
-                        )+
-                        ]))));
-                        group
-                    }),
-                )+
-                ])))
+                let data: Vec<GroupData> = vec![
+                $( // This repetition is for each group
+                    GroupData {
+                        group: stringify!($group),
+                        grouptoggle: stringify!($($group_enabled)?),
+                        groupdoc: (make_config! { @show $($groupdoc)? }),
+
+                        elements: vec![
+                        $( // This repetition is for each element within a group
+                            ElementData {
+                                editable: $editable,
+                                name: stringify!($name),
+                                value: serde_json::to_value(&cfg.$name).unwrap_or_default(),
+                                default: serde_json::to_value(&def.$name).unwrap_or_default(),
+                                r#type: _get_form_type(stringify!($ty)),
+                                doc: _get_doc(concat!($($doc),+)),
+                                overridden: overridden.contains(&pastey::paste!(stringify!([<$name:upper>]))),
+                            },
+                        )+], // End of elements repetition
+                    },
+                )+]; // End of groups repetition
+                serde_json::to_value(data).unwrap()
             }
 
             pub fn get_support_json(&self) -> serde_json::Value {
@@ -269,8 +412,8 @@ macro_rules! make_config {
                 // Pass types will always be masked and no need to put them in the list.
                 // Besides Pass, only String types will be masked via _privacy_mask.
                 const PRIVACY_CONFIG: &[&str] = &[
-                    "allowed_iframe_ancestors",
                     "allowed_connect_src",
+                    "allowed_iframe_ancestors",
                     "database_url",
                     "domain_origin",
                     "domain_path",
@@ -278,16 +421,18 @@ macro_rules! make_config {
                     "helo_name",
                     "org_creation_users",
                     "signups_domains_whitelist",
+                    "_smtp_img_src",
+                    "smtp_from_name",
                     "smtp_from",
                     "smtp_host",
                     "smtp_username",
-                    "_smtp_img_src",
-                    "sso_client_id",
                     "sso_authority",
                     "sso_callback_path",
+                    "sso_client_id",
                 ];
 
                 let cfg = {
+                    // Hold the inner lock for as short a time as possible and clone what is needed to prevent deadlocks
                     let inner = &self.inner.read().unwrap();
                     inner.config.clone()
                 };
@@ -317,13 +462,21 @@ macro_rules! make_config {
                 serde_json::Value::Object({
                     let mut json = serde_json::Map::new();
                     $($(
-                        json.insert(stringify!($name).into(), make_config! { @supportstr $name, cfg.$name, $ty, $none_action });
+                        json.insert(String::from(stringify!($name)), make_config! { @supportstr $name, cfg.$name, $ty, $none_action });
                     )+)+;
+                    // Loop through all privacy sensitive keys and mask them
+                    for mask_key in PRIVACY_CONFIG {
+                        if let Some(value) = json.get_mut(*mask_key) {
+                            if let Some(s) = value.as_str() {
+                                *value = _privacy_mask(s).into();
+                            }
+                        }
+                    }
                     json
                 })
             }
 
-            pub fn get_overrides(&self) -> Vec<String> {
+            pub fn get_overrides(&self) -> Vec<&'static str> {
                 let overrides = {
                     let inner = &self.inner.read().unwrap();
                     inner._overrides.clone()
@@ -332,55 +485,6 @@ macro_rules! make_config {
             }
         }
     };
-
-    // Support string print
-    ( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value($value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
-    ( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
-    ( @supportstr $name:ident, $value:expr, String, option ) => { // Optional other value, we return as is or convert to string to apply the privacy config
-        if PRIVACY_CONFIG.contains(&stringify!($name)) {
-            serde_json::to_value($value.as_ref().map(|x| _privacy_mask(x) )).unwrap()
-        } else {
-            serde_json::to_value($value).unwrap()
-        }
-    };
-    ( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { // Required other value, we return as is or convert to string to apply the privacy config
-        if PRIVACY_CONFIG.contains(&stringify!($name)) {
-            _privacy_mask(&$value).into()
-        } else {
-            ($value).into()
-        }
-    };
-    ( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value($value).unwrap() }; // Optional other value, we return as is or convert to string to apply the privacy config
-    ( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to string to apply the privacy config
-
-    // Group or empty string
-    ( @show ) => { "" };
-    ( @show $lit:literal ) => { $lit };
-
-    // Wrap the optionals in an Option type
-    ( @type $ty:ty, option) => { Option<$ty> };
-    ( @type $ty:ty, $id:ident) => { $ty };
-
-    // Generate the values depending on none_action
-    ( @build $value:expr, $config:expr, option, ) => { $value };
-    ( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
-    ( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
-        match $value {
-            Some(v) => v,
-            None => {
-                let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
-                f($config)
-            }
-        }
-    }};
-    ( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
-        let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
-        f($config)
-    }};
-
-    ( @getenv $name:expr, bool ) => { get_env_bool($name) };
-    ( @getenv $name:expr, $ty:ident ) => { get_env($name) };
-
 }
 
 //STRUCTURE:
@@ -1518,7 +1622,7 @@ impl Config {
         if let Some(akey) = self._duo_akey() {
             akey
         } else {
-            let akey_s = crate::crypto::encode_random_bytes::<64>(data_encoding::BASE64);
+            let akey_s = crate::crypto::encode_random_bytes::<64>(&data_encoding::BASE64);
 
             // Save the new value
             let builder = ConfigBuilder {
@@ -1542,7 +1646,7 @@ impl Config {
         token.is_some() && !token.unwrap().trim().is_empty()
     }
 
-    pub fn opendal_operator_for_path_type(&self, path_type: PathType) -> Result<opendal::Operator, Error> {
+    pub fn opendal_operator_for_path_type(&self, path_type: &PathType) -> Result<opendal::Operator, Error> {
         let path = match path_type {
             PathType::Data => self.data_folder(),
             PathType::IconCache => self.icon_cache_folder(),
@@ -1735,7 +1839,7 @@ fn to_json<'reg, 'rc>(
 
 // Configure the web-vault version as an integer so it can be used in smaller-than or greater-than comparisons.
 // The default is based upon the version since this feature was added.
-static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| {
+static WEB_VAULT_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
     let vault_version = get_web_vault_version();
     // Use a single regex capture to extract version components
     let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
@@ -1751,7 +1855,7 @@ static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| {
 
 // Configure the Vaultwarden version as an integer so it can be used in smaller-than or greater-than comparisons.
 // The default is based upon the version since this feature was added.
-static VW_VERSION: Lazy<semver::Version> = Lazy::new(|| {
+static VW_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
     let vw_version = crate::VERSION.unwrap_or("1.32.5");
     // Use a single regex capture to extract version components
     let re = regex::Regex::new(r"(\d{1})\.(\d{1,2})\.(\d{1,2})").unwrap();
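
The hand-rolled Deserialize above follows the standard serde recipe for manually deserializing a struct, minus the visit_seq arm: a field identifier with an __ignore fallback, plus a visit_map loop. Stripped of the macro plumbing, the shape is roughly the following sketch (String keys replace the generated Field enum to keep it short, and the real macro additionally rejects duplicate keys):

use std::fmt;

use serde::de::{Deserialize, Deserializer, IgnoredAny, MapAccess, Visitor};

#[derive(Default, Debug)]
struct Builder {
    domain: Option<String>,
    signups_allowed: Option<bool>,
}

impl<'de> Deserialize<'de> for Builder {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct BuilderVisitor;

        impl<'de> Visitor<'de> for BuilderVisitor {
            type Value = Builder;

            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("struct Builder")
            }

            // Map-only deserialization: missing keys stay None, unknown keys are skipped.
            fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> Result<Builder, A::Error> {
                let mut builder = Builder::default();
                while let Some(key) = map.next_key::<String>()? {
                    match key.as_str() {
                        "domain" => builder.domain = map.next_value()?,
                        "signups_allowed" => builder.signups_allowed = map.next_value()?,
                        _ => {
                            let _ = map.next_value::<IgnoredAny>()?;
                        }
                    }
                }
                Ok(builder)
            }
        }

        deserializer.deserialize_struct("Builder", &["domain", "signups_allowed"], BuilderVisitor)
    }
}

fn main() {
    let builder: Builder = serde_json::from_str(r#"{"domain":"https://example.com","unknown":1}"#).unwrap();
    assert_eq!(builder.domain.as_deref(), Some("https://example.com"));
    assert!(builder.signups_allowed.is_none());
}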

+ 2 - 2
src/crypto.rs

@@ -48,7 +48,7 @@ pub fn get_random_bytes<const N: usize>() -> [u8; N] {
 }
 
 /// Encode random bytes using the provided function.
-pub fn encode_random_bytes<const N: usize>(e: Encoding) -> String {
+pub fn encode_random_bytes<const N: usize>(e: &Encoding) -> String {
     e.encode(&get_random_bytes::<N>())
 }
 
@@ -81,7 +81,7 @@ pub fn get_random_string_alphanum(num_chars: usize) -> String {
 }
 
 pub fn generate_id<const N: usize>() -> String {
-    encode_random_bytes::<N>(HEXLOWER)
+    encode_random_bytes::<N>(&HEXLOWER)
 }
 
 pub fn generate_send_file_id() -> String {
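
data_encoding's BASE32, BASE64, and HEXLOWER are constants, so borrowing them instead of passing them by value avoids materializing a fresh Encoding on every call; that is all the &Encoding change is about. A hedged usage sketch, with the rand crate standing in for the project's own random-byte helper:

use data_encoding::{Encoding, BASE32, HEXLOWER};
use rand::RngCore;

/// Fills N random bytes and encodes them with the borrowed encoding.
fn encode_random_bytes<const N: usize>(e: &Encoding) -> String {
    let mut buf = [0u8; N];
    rand::thread_rng().fill_bytes(&mut buf);
    e.encode(&buf)
}

fn main() {
    // The encoding constants are borrowed, not copied, at every call site.
    let totp_secret = encode_random_bytes::<20>(&BASE32);
    let send_file_id = encode_random_bytes::<32>(&HEXLOWER);
    println!("{totp_secret} {send_file_id}");
}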

+ 2 - 2
src/db/models/attachment.rs

@@ -44,7 +44,7 @@ impl Attachment {
     }
 
     pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> {
-        let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?;
+        let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
 
         if operator.info().scheme() == opendal::Scheme::Fs {
             let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
@@ -117,7 +117,7 @@ impl Attachment {
             .map_res("Error deleting attachment")
         }}?;
 
-        let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?;
+        let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
         let file_path = self.get_file_path();
 
         if let Err(e) = operator.delete(&file_path).await {

+ 2 - 2
src/db/models/device.rs

@@ -48,7 +48,7 @@ impl Device {
     }
 
     pub fn refresh_twofactor_remember(&mut self) -> String {
-        let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64);
+        let twofactor_remember = crypto::encode_random_bytes::<180>(&BASE64);
         self.twofactor_remember = Some(twofactor_remember.clone());
 
         twofactor_remember
@@ -129,7 +129,7 @@ impl Device {
 
             push_uuid: Some(PushId(get_uuid())),
             push_token: None,
-            refresh_token: crypto::encode_random_bytes::<64>(BASE64URL),
+            refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL),
             twofactor_remember: None,
         };
 

+ 1 - 1
src/db/models/organization.rs

@@ -172,7 +172,7 @@ impl PartialOrd<MembershipType> for i32 {
 
 /// Local methods
 impl Organization {
-    pub fn new(name: String, billing_email: String, private_key: Option<String>, public_key: Option<String>) -> Self {
+    pub fn new(name: String, billing_email: &str, private_key: Option<String>, public_key: Option<String>) -> Self {
         let billing_email = billing_email.to_lowercase();
         Self {
             uuid: OrganizationId(crate::util::get_uuid()),

+ 1 - 1
src/db/models/send.rs

@@ -225,7 +225,7 @@ impl Send {
         self.update_users_revision(conn).await;
 
         if self.atype == SendType::File as i32 {
-            let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?;
+            let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
             operator.remove_all(&self.uuid).await.ok();
         }
 

+ 1 - 1
src/db/models/user.rs

@@ -106,7 +106,7 @@ impl User {
     pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32;
     pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000;
 
-    pub fn new(email: String, name: Option<String>) -> Self {
+    pub fn new(email: &str, name: Option<String>) -> Self {
         let now = Utc::now().naive_utc();
         let email = email.to_lowercase();
 

+ 122 - 46
src/error.rs

@@ -3,6 +3,7 @@
 //
 use crate::db::models::EventType;
 use crate::http_client::CustomHttpClientError;
+use serde::ser::{Serialize, SerializeStruct, Serializer};
 use std::error::Error as StdError;
 
 macro_rules! make_error {
@@ -73,7 +74,7 @@ make_error! {
     Empty(Empty):     _no_source, _serialize,
     // Used to represent err! calls
     Simple(String):  _no_source,  _api_error,
-    Compact(Compact):  _no_source,  _api_error_small,
+    Compact(Compact):  _no_source,  _compact_api_error,
 
     // Used in our custom http client to handle non-global IPs and blocked domains
     CustomHttpClient(CustomHttpClientError): _has_source, _api_error,
@@ -130,6 +131,10 @@ impl Error {
         (usr_msg, log_msg.into()).into()
     }
 
+    pub fn new_msg<M: Into<String> + Clone>(usr_msg: M) -> Self {
+        (usr_msg.clone(), usr_msg.into()).into()
+    }
+
     pub fn empty() -> Self {
         Empty {}.into()
     }
@@ -196,38 +201,97 @@ fn _no_source<T, S>(_: T) -> Option<S> {
     None
 }
 
-fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String {
+fn _serialize(e: &impl Serialize, _msg: &str) -> String {
     serde_json::to_string(e).unwrap()
 }
 
+/// This will serialize the default ApiErrorResponse
+/// It will add the needed fields, which are mostly empty or repeat the message
+/// This is more efficient than having a larger struct and using the Serialize derive
+/// It also prevents using `json!()` calls to create the final output
+impl Serialize for ApiErrorResponse<'_> {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        #[derive(serde::Serialize)]
+        struct ErrorModel<'a> {
+            message: &'a str,
+            object: &'static str,
+        }
+
+        let mut state = serializer.serialize_struct("ApiErrorResponse", 9)?;
+
+        state.serialize_field("message", self.0.message)?;
+
+        let mut validation_errors = std::collections::HashMap::with_capacity(1);
+        validation_errors.insert("", vec![self.0.message]);
+        state.serialize_field("validationErrors", &validation_errors)?;
+
+        let error_model = ErrorModel {
+            message: self.0.message,
+            object: "error",
+        };
+        state.serialize_field("errorModel", &error_model)?;
+
+        state.serialize_field("error", "")?;
+        state.serialize_field("error_description", "")?;
+        state.serialize_field("exceptionMessage", &None::<()>)?;
+        state.serialize_field("exceptionStackTrace", &None::<()>)?;
+        state.serialize_field("innerExceptionMessage", &None::<()>)?;
+        state.serialize_field("object", "error")?;
+
+        state.end()
+    }
+}
+
+/// This will serialize the smaller CompactApiErrorResponse
+/// It will add the needed fields which are mostly empty
+/// This is more efficient than having a larger struct and using the Serialize derive
+/// It also prevents using `json!()` calls to create the final output
+impl Serialize for CompactApiErrorResponse<'_> {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let mut state = serializer.serialize_struct("CompactApiErrorResponse", 6)?;
+
+        state.serialize_field("message", self.0.message)?;
+        state.serialize_field("validationErrors", &None::<()>)?;
+        state.serialize_field("exceptionMessage", &None::<()>)?;
+        state.serialize_field("exceptionStackTrace", &None::<()>)?;
+        state.serialize_field("innerExceptionMessage", &None::<()>)?;
+        state.serialize_field("object", "error")?;
+
+        state.end()
+    }
+}
+
+/// Main API Error struct template
+/// This struct, which can be used by both ApiErrorResponse and CompactApiErrorResponse,
+/// is small and doesn't contain unneeded empty fields. This is more memory efficient and also means less code to compile
+struct ApiErrorMsg<'a> {
+    message: &'a str,
+}
+/// Default API Error response struct
+/// The custom serialization adds all other needed fields
+struct ApiErrorResponse<'a>(ApiErrorMsg<'a>);
+/// Compact API Error response struct used for some newer error responses
+/// The custom serialization adds all other needed fields
+struct CompactApiErrorResponse<'a>(ApiErrorMsg<'a>);
+
 fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
-    let json = json!({
-        "message": msg,
-        "error": "",
-        "error_description": "",
-        "validationErrors": {"": [ msg ]},
-        "errorModel": {
-            "message": msg,
-            "object": "error"
-        },
-        "exceptionMessage": null,
-        "exceptionStackTrace": null,
-        "innerExceptionMessage": null,
-        "object": "error"
-    });
-    _serialize(&json, "")
+    let response = ApiErrorMsg {
+        message: msg,
+    };
+    serde_json::to_string(&ApiErrorResponse(response)).unwrap()
 }
 
-fn _api_error_small(_: &impl std::any::Any, msg: &str) -> String {
-    let json = json!({
-        "message": msg,
-        "validationErrors": null,
-        "exceptionMessage": null,
-        "exceptionStackTrace": null,
-        "innerExceptionMessage": null,
-        "object": "error"
-    });
-    _serialize(&json, "")
+fn _compact_api_error(_: &impl std::any::Any, msg: &str) -> String {
+    let response = ApiErrorMsg {
+        message: msg,
+    };
+    serde_json::to_string(&CompactApiErrorResponse(response)).unwrap()
 }
 
 //
@@ -258,34 +322,41 @@ impl Responder<'_, 'static> for Error {
 #[macro_export]
 macro_rules! err {
     ($kind:ident, $msg:expr) => {{
-        error!("{}", $msg);
-        return Err($crate::error::Error::new($msg, $msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
+        let msg = $msg;
+        error!("{msg}");
+        return Err($crate::error::Error::new_msg(msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
     }};
     ($msg:expr) => {{
-        error!("{}", $msg);
-        return Err($crate::error::Error::new($msg, $msg));
+        let msg = $msg;
+        error!("{msg}");
+        return Err($crate::error::Error::new_msg(msg));
     }};
     ($msg:expr, ErrorEvent $err_event:tt) => {{
-        error!("{}", $msg);
-        return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event));
+        let msg = $msg;
+        error!("{msg}");
+        return Err($crate::error::Error::new_msg(msg).with_event($crate::error::ErrorEvent $err_event));
     }};
     ($usr_msg:expr, $log_value:expr) => {{
-        error!("{}. {}", $usr_msg, $log_value);
-        return Err($crate::error::Error::new($usr_msg, $log_value));
+        let usr_msg = $usr_msg;
+        let log_value = $log_value;
+        error!("{usr_msg}. {log_value}");
+        return Err($crate::error::Error::new(usr_msg, log_value));
     }};
     ($usr_msg:expr, $log_value:expr, ErrorEvent $err_event:tt) => {{
-        error!("{}. {}", $usr_msg, $log_value);
-        return Err($crate::error::Error::new($usr_msg, $log_value).with_event($crate::error::ErrorEvent $err_event));
+        let usr_msg = $usr_msg;
+        let log_value = $log_value;
+        error!("{usr_msg}. {log_value}");
+        return Err($crate::error::Error::new(usr_msg, log_value).with_event($crate::error::ErrorEvent $err_event));
     }};
 }
 
 #[macro_export]
 macro_rules! err_silent {
     ($msg:expr) => {{
-        return Err($crate::error::Error::new($msg, $msg));
+        return Err($crate::error::Error::new_msg($msg));
     }};
     ($msg:expr, ErrorEvent $err_event:tt) => {{
-        return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event));
+        return Err($crate::error::Error::new_msg($msg).with_event($crate::error::ErrorEvent $err_event));
     }};
     ($usr_msg:expr, $log_value:expr) => {{
         return Err($crate::error::Error::new($usr_msg, $log_value));
@@ -298,12 +369,15 @@ macro_rules! err_silent {
 #[macro_export]
 macro_rules! err_code {
     ($msg:expr, $err_code:expr) => {{
-        error!("{}", $msg);
-        return Err($crate::error::Error::new($msg, $msg).with_code($err_code));
+        let msg = $msg;
+        error!("{msg}");
+        return Err($crate::error::Error::new_msg(msg).with_code($err_code));
     }};
     ($usr_msg:expr, $log_value:expr, $err_code:expr) => {{
-        error!("{}. {}", $usr_msg, $log_value);
-        return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
+        let usr_msg = $usr_msg;
+        let log_value = $log_value;
+        error!("{usr_msg}. {log_value}");
+        return Err($crate::error::Error::new(usr_msg, log_value).with_code($err_code));
     }};
 }
 
@@ -311,7 +385,7 @@ macro_rules! err_code {
 macro_rules! err_discard {
     ($msg:expr, $data:expr) => {{
         std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
-        return Err($crate::error::Error::new($msg, $msg));
+        return Err($crate::error::Error::new_msg($msg));
     }};
     ($usr_msg:expr, $log_value:expr, $data:expr) => {{
         std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
@@ -336,7 +410,9 @@ macro_rules! err_handler {
         return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $expr));
     }};
     ($usr_msg:expr, $log_value:expr) => {{
-        error!(target: "auth", "Unauthorized Error: {}. {}", $usr_msg, $log_value);
-        return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $usr_msg));
+        let usr_msg = $usr_msg;
+        let log_value = $log_value;
+        error!(target: "auth", "Unauthorized Error: {usr_msg}. {log_value}");
+        return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, usr_msg));
     }};
 }
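
The new `let msg = $msg;` binding inside these macros means an argument such as a format! expression is evaluated once and then reused for both the log line and the returned error, instead of being expanded multiple times. A hypothetical stand-in macro showing the same idea:

// Hypothetical simplified macro, not the real err!; it only illustrates single evaluation.
macro_rules! fail {
    ($msg:expr) => {{
        let msg = $msg; // evaluate the expression exactly once
        eprintln!("{msg}");
        return Err(msg);
    }};
}

fn check(n: i32) -> Result<(), String> {
    if n < 0 {
        // Without the binding, this format! would be expanded more than once in the
        // macro body, producing multiple identical String allocations.
        fail!(format!("invalid value: {n}"));
    }
    Ok(())
}

fn main() {
    assert!(check(-1).is_err());
    assert!(check(1).is_ok());
}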

+ 10 - 10
src/http_client.rs

@@ -2,12 +2,11 @@ use std::{
     fmt,
     net::{IpAddr, SocketAddr},
     str::FromStr,
-    sync::{Arc, Mutex},
+    sync::{Arc, LazyLock, Mutex},
     time::Duration,
 };
 
 use hickory_resolver::{name_server::TokioConnectionProvider, TokioResolver};
-use once_cell::sync::Lazy;
 use regex::Regex;
 use reqwest::{
     dns::{Name, Resolve, Resolving},
@@ -25,9 +24,10 @@ pub fn make_http_request(method: reqwest::Method, url: &str) -> Result<reqwest::
         err!("Invalid host");
     };
 
-    should_block_host(host)?;
+    should_block_host(&host)?;
 
-    static INSTANCE: Lazy<Client> = Lazy::new(|| get_reqwest_client_builder().build().expect("Failed to build client"));
+    static INSTANCE: LazyLock<Client> =
+        LazyLock::new(|| get_reqwest_client_builder().build().expect("Failed to build client"));
 
     Ok(INSTANCE.request(method, url))
 }
@@ -45,7 +45,7 @@ pub fn get_reqwest_client_builder() -> ClientBuilder {
             return attempt.error("Invalid host");
         };
 
-        if let Err(e) = should_block_host(host) {
+        if let Err(e) = should_block_host(&host) {
             return attempt.error(e);
         }
 
@@ -100,11 +100,11 @@ fn should_block_address_regex(domain_or_ip: &str) -> bool {
     is_match
 }
 
-fn should_block_host(host: Host<&str>) -> Result<(), CustomHttpClientError> {
+fn should_block_host(host: &Host<&str>) -> Result<(), CustomHttpClientError> {
     let (ip, host_str): (Option<IpAddr>, String) = match host {
-        Host::Ipv4(ip) => (Some(ip.into()), ip.to_string()),
-        Host::Ipv6(ip) => (Some(ip.into()), ip.to_string()),
-        Host::Domain(d) => (None, d.to_string()),
+        Host::Ipv4(ip) => (Some(IpAddr::V4(*ip)), ip.to_string()),
+        Host::Ipv6(ip) => (Some(IpAddr::V6(*ip)), ip.to_string()),
+        Host::Domain(d) => (None, (*d).to_string()),
     };
 
     if let Some(ip) = ip {
@@ -179,7 +179,7 @@ type BoxError = Box<dyn std::error::Error + Send + Sync>;
 
 impl CustomDnsResolver {
     fn instance() -> Arc<Self> {
-        static INSTANCE: Lazy<Arc<CustomDnsResolver>> = Lazy::new(CustomDnsResolver::new);
+        static INSTANCE: LazyLock<Arc<CustomDnsResolver>> = LazyLock::new(CustomDnsResolver::new);
         Arc::clone(&*INSTANCE)
     }
 

+ 2 - 2
src/mail.rs

@@ -184,7 +184,7 @@ pub async fn send_delete_account(address: &str, user_id: &UserId) -> EmptyResult
 }
 
 pub async fn send_verify_email(address: &str, user_id: &UserId) -> EmptyResult {
-    let claims = generate_verify_email_claims(user_id.clone());
+    let claims = generate_verify_email_claims(user_id);
     let verify_email_token = encode_jwt(&claims);
 
     let (subject, body_html, body_text) = get_text(
@@ -235,7 +235,7 @@ pub async fn send_welcome(address: &str) -> EmptyResult {
 }
 
 pub async fn send_welcome_must_verify(address: &str, user_id: &UserId) -> EmptyResult {
-    let claims = generate_verify_email_claims(user_id.clone());
+    let claims = generate_verify_email_claims(user_id);
     let verify_email_token = encode_jwt(&claims);
 
     let (subject, body_html, body_text) = get_text(

+ 1 - 1
src/main.rs

@@ -448,7 +448,7 @@ async fn check_data_folder() {
 
     if data_folder.starts_with("s3://") {
         if let Err(e) = CONFIG
-            .opendal_operator_for_path_type(PathType::Data)
+            .opendal_operator_for_path_type(&PathType::Data)
             .unwrap_or_else(|e| {
                 error!("Failed to create S3 operator for data folder '{data_folder}': {e:?}");
                 exit(1);

+ 3 - 4
src/ratelimit.rs

@@ -1,5 +1,4 @@
-use once_cell::sync::Lazy;
-use std::{net::IpAddr, num::NonZeroU32, time::Duration};
+use std::{net::IpAddr, num::NonZeroU32, sync::LazyLock, time::Duration};
 
 use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter};
 
@@ -7,13 +6,13 @@ use crate::{Error, CONFIG};
 
 type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultClock>;
 
-static LIMITER_LOGIN: Lazy<Limiter> = Lazy::new(|| {
+static LIMITER_LOGIN: LazyLock<Limiter> = LazyLock::new(|| {
     let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds());
     let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst");
     RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst))
 });
 
-static LIMITER_ADMIN: Lazy<Limiter> = Lazy::new(|| {
+static LIMITER_ADMIN: LazyLock<Limiter> = LazyLock::new(|| {
     let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds());
     let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst");
     RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst))

+ 8 - 9
src/sso.rs

@@ -1,12 +1,11 @@
+use std::{sync::LazyLock, time::Duration};
+
 use chrono::Utc;
 use derive_more::{AsRef, Deref, Display, From};
+use mini_moka::sync::Cache;
 use regex::Regex;
-use std::time::Duration;
 use url::Url;
 
-use mini_moka::sync::Cache;
-use once_cell::sync::Lazy;
-
 use crate::{
     api::ApiResult,
     auth,
@@ -21,12 +20,12 @@ use crate::{
 
 pub static FAKE_IDENTIFIER: &str = "VW_DUMMY_IDENTIFIER_FOR_OIDC";
 
-static AC_CACHE: Lazy<Cache<OIDCState, AuthenticatedUser>> =
-    Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
+static AC_CACHE: LazyLock<Cache<OIDCState, AuthenticatedUser>> =
+    LazyLock::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
 
-static SSO_JWT_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin()));
+static SSO_JWT_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin()));
 
-pub static NONCE_EXPIRATION: Lazy<chrono::Duration> = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
+pub static NONCE_EXPIRATION: LazyLock<chrono::Duration> = LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
 
 #[derive(
     Clone,
@@ -151,7 +150,7 @@ fn decode_token_claims(token_name: &str, token: &str) -> ApiResult<BasicTokenCla
     }
 }
 
-pub fn decode_state(base64_state: String) -> ApiResult<OIDCState> {
+pub fn decode_state(base64_state: &str) -> ApiResult<OIDCState> {
     let state = match data_encoding::BASE64.decode(base64_state.as_bytes()) {
         Ok(vec) => match String::from_utf8(vec) {
             Ok(valid) => OIDCState(valid),

+ 7 - 11
src/sso_client.rs

@@ -1,13 +1,9 @@
-use regex::Regex;
-use std::borrow::Cow;
-use std::time::Duration;
-use url::Url;
+use std::{borrow::Cow, sync::LazyLock, time::Duration};
 
 use mini_moka::sync::Cache;
-use once_cell::sync::Lazy;
-use openidconnect::core::*;
-use openidconnect::reqwest;
-use openidconnect::*;
+use openidconnect::{core::*, reqwest, *};
+use regex::Regex;
+use url::Url;
 
 use crate::{
     api::{ApiResult, EmptyResult},
@@ -16,8 +12,8 @@ use crate::{
     CONFIG,
 };
 
-static CLIENT_CACHE_KEY: Lazy<String> = Lazy::new(|| "sso-client".to_string());
-static CLIENT_CACHE: Lazy<Cache<String, Client>> = Lazy::new(|| {
+static CLIENT_CACHE_KEY: LazyLock<String> = LazyLock::new(|| "sso-client".to_string());
+static CLIENT_CACHE: LazyLock<Cache<String, Client>> = LazyLock::new(|| {
     Cache::builder().max_capacity(1).time_to_live(Duration::from_secs(CONFIG.sso_client_cache_expiration())).build()
 });
 
@@ -162,7 +158,7 @@ impl Client {
         if CONFIG.sso_pkce() {
             match nonce.verifier {
                 None => err!(format!("Missing verifier in the DB nonce table")),
-                Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret.clone())),
+                Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)),
             }
         }
 

+ 1 - 1
src/util.rs

@@ -842,7 +842,7 @@ pub fn is_global(ip: std::net::IpAddr) -> bool {
 
 /// Saves a Rocket temporary file to the OpenDAL Operator at the given path.
 pub async fn save_temp_file(
-    path_type: PathType,
+    path_type: &PathType,
     path: &str,
     temp_file: rocket::fs::TempFile<'_>,
     overwrite: bool,