Преглед на файлове

Merge remote-tracking branch 'origin/master' into 241018-sync-master-to-release-58

zmstone преди 1 година
родител
ревизия
2a0db9ffdc
променени са 100 файла, в които са добавени 1389 реда и са изтрити 476 реда
  1. 3 3
      .github/actions/prepare-jmeter/action.yaml
  2. 5 5
      .github/workflows/_pr_entrypoint.yaml
  3. 5 5
      .github/workflows/_push-entrypoint.yaml
  4. 4 4
      .github/workflows/build_and_push_docker_images.yaml
  5. 4 4
      .github/workflows/build_packages.yaml
  6. 6 6
      .github/workflows/build_packages_cron.yaml
  7. 4 4
      .github/workflows/build_slim_packages.yaml
  8. 1 1
      .github/workflows/bump-dashboard-version.yaml
  9. 2 2
      .github/workflows/check_deps_integrity.yaml
  10. 2 2
      .github/workflows/codeql.yaml
  11. 1 1
      .github/workflows/green_master.yaml
  12. 3 3
      .github/workflows/performance_test.yaml
  13. 2 2
      .github/workflows/release.yaml
  14. 1 1
      .github/workflows/run_conf_tests.yaml
  15. 2 2
      .github/workflows/run_docker_tests.yaml
  16. 3 3
      .github/workflows/run_emqx_app_tests.yaml
  17. 2 2
      .github/workflows/run_helm_tests.yaml
  18. 11 11
      .github/workflows/run_jmeter_tests.yaml
  19. 4 4
      .github/workflows/run_relup_tests.yaml
  20. 2 2
      .github/workflows/run_test_cases.yaml
  21. 2 2
      .github/workflows/scorecard.yaml
  22. 1 1
      .github/workflows/sync-release-branch.yaml
  23. 1 1
      .github/workflows/upload-helm-charts.yaml
  24. 2 2
      README-CN.md
  25. 3 3
      README-RU.md
  26. 3 3
      README.md
  27. 1 0
      apps/emqx/include/emqx_placeholder.hrl
  28. 34 3
      apps/emqx/src/emqx_channel.erl
  29. 17 8
      apps/emqx/src/emqx_connection.erl
  30. 1 1
      apps/emqx/src/emqx_crl_cache.erl
  31. 18 3
      apps/emqx/src/emqx_keepalive.erl
  32. 1 0
      apps/emqx/src/emqx_mountpoint.erl
  33. 74 27
      apps/emqx/src/emqx_packet.erl
  34. 1 1
      apps/emqx/src/emqx_persistent_session_ds/emqx_persistent_session_ds_shared_subs.erl
  35. 31 6
      apps/emqx/src/emqx_quic_connection.erl
  36. 18 8
      apps/emqx/src/emqx_quic_data_stream.erl
  37. 30 7
      apps/emqx/src/emqx_ssl_crl_cache.erl
  38. 2 9
      apps/emqx/src/emqx_trace/emqx_trace_formatter.erl
  39. 4 7
      apps/emqx/src/emqx_trace/emqx_trace_json_formatter.erl
  40. 1 1
      apps/emqx/src/proto/emqx_proto_v1.erl
  41. 3 1
      apps/emqx/test/emqx_bpapi_static_checks.erl
  42. 21 2
      apps/emqx/test/emqx_common_test_helpers.erl
  43. 4 5
      apps/emqx/test/emqx_crl_cache_SUITE.erl
  44. 94 0
      apps/emqx/test/emqx_packet_tests.erl
  45. 179 0
      apps/emqx/test/emqx_quic_multistreams_SUITE.erl
  46. 12 10
      apps/emqx/test/emqx_trace_SUITE.erl
  47. 1 0
      apps/emqx_auth/include/emqx_authn.hrl
  48. 1 1
      apps/emqx_auth/src/emqx_auth.app.src
  49. 9 1
      apps/emqx_auth/src/emqx_authn/emqx_authn_chains.erl
  50. 16 5
      apps/emqx_auth/src/emqx_authn/emqx_authn_user_import_api.erl
  51. 1 0
      apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl
  52. 28 0
      apps/emqx_auth/test/emqx_authz/emqx_authz_file_SUITE.erl
  53. 1 1
      apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src
  54. 1 1
      apps/emqx_auth_jwt/src/emqx_authn_jwt.erl
  55. 1 1
      apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src
  56. 39 44
      apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl
  57. 11 5
      apps/emqx_auth_mnesia/test/emqx_authn_api_mnesia_SUITE.erl
  58. 18 18
      apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl
  59. 1 1
      apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src
  60. 1 0
      apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl
  61. 1 1
      apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src
  62. 1 0
      apps/emqx_auth_mysql/src/emqx_authz_mysql.erl
  63. 1 1
      apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src
  64. 1 0
      apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl
  65. 1 1
      apps/emqx_auth_redis/src/emqx_auth_redis.app.src
  66. 1 0
      apps/emqx_auth_redis/src/emqx_authz_redis.erl
  67. 1 1
      apps/emqx_bridge/src/emqx_bridge.app.src
  68. 8 4
      apps/emqx_bridge/src/emqx_bridge_v2.erl
  69. 8 2
      apps/emqx_bridge/src/emqx_bridge_v2_api.erl
  70. 98 0
      apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl
  71. 1 1
      apps/emqx_bridge_iotdb/rebar.config
  72. 12 1
      apps/emqx_bridge_snowflake/test/emqx_bridge_snowflake_SUITE.erl
  73. 1 1
      apps/emqx_cluster_link/src/emqx_cluster_link.app.src
  74. 63 17
      apps/emqx_cluster_link/src/emqx_cluster_link.erl
  75. 2 4
      apps/emqx_cluster_link/src/emqx_cluster_link_app.erl
  76. 0 13
      apps/emqx_cluster_link/src/emqx_cluster_link_config.erl
  77. 11 1
      apps/emqx_cluster_link/src/emqx_cluster_link_extrouter_gc.erl
  78. 8 2
      apps/emqx_cluster_link/src/emqx_cluster_link_mqtt.erl
  79. 11 6
      apps/emqx_cluster_link/src/emqx_cluster_link_router_syncer.erl
  80. 63 6
      apps/emqx_cluster_link/test/emqx_cluster_link_SUITE.erl
  81. 114 23
      apps/emqx_cluster_link/test/emqx_cluster_link_api_SUITE.erl
  82. 1 1
      apps/emqx_conf/src/emqx_conf.app.src
  83. 5 3
      apps/emqx_conf/src/emqx_conf_cli.erl
  84. 1 1
      apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src
  85. 17 11
      apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl
  86. 1 1
      apps/emqx_dashboard/src/emqx_dashboard.app.src
  87. 28 24
      apps/emqx_dashboard/src/emqx_dashboard_monitor.erl
  88. 19 0
      apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl
  89. 1 1
      apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src
  90. 4 4
      apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_session.erl
  91. 27 0
      apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_sup.erl
  92. 24 14
      apps/emqx_dashboard_sso/src/emqx_dashboard_sso_sup.erl
  93. 39 23
      apps/emqx_ds_shared_sub/src/emqx_ds_shared_sub_leader.erl
  94. 1 1
      apps/emqx_gateway/src/emqx_gateway.app.src
  95. 1 1
      apps/emqx_gateway/src/emqx_gateway_api.erl
  96. 20 37
      apps/emqx_gateway/src/emqx_gateway_api_authn_user_import.erl
  97. 36 12
      apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl
  98. 1 1
      apps/emqx_gateway_ocpp/rebar.config
  99. 2 0
      apps/emqx_machine/src/emqx_machine.erl
  100. 0 0
      apps/emqx_machine/src/emqx_machine_replicant_health_probe.erl

+ 3 - 3
.github/actions/prepare-jmeter/action.yaml

@@ -3,19 +3,19 @@ description: 'Download jmeter packages'
 runs:
   using: composite
   steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         repository: emqx/emqx-fvt
         ref: broker-autotest-v5
         path: scripts
-    - uses: actions/setup-java@99b8673ff64fbf99d8d325f52d9a5bdedb8483e9 # v4.2.1
+    - uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # v4.4.0
       with:
         java-version: '8.0.282' # The JDK version to make available on the path.
         java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
         architecture: x64 # (x64 or x86) - defaults to x64
         # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
         distribution: 'zulu'
-    - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
+    - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: apache-jmeter.tgz
     - name: install jmeter

+ 5 - 5
.github/workflows/_pr_entrypoint.yaml

@@ -30,7 +30,7 @@ jobs:
       ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
       BUILDER: ${{ steps.env.outputs.BUILDER }}
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
       - name: Set up environment
@@ -52,7 +52,7 @@ jobs:
       ct-docker: ${{ steps.matrix.outputs.ct-docker }}
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
           fetch-depth: 0
@@ -133,7 +133,7 @@ jobs:
           - emqx-enterprise
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           fetch-depth: 0
       - name: Work around https://github.com/actions/checkout/issues/766
@@ -150,12 +150,12 @@ jobs:
           echo "export PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a env.sh
           zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
           make ${PROFILE}-rel
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: ${{ matrix.profile }}
           path: ${{ matrix.profile }}.zip
           retention-days: 7
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: "${{ matrix.profile }}-schema-dump"
           path: |

+ 5 - 5
.github/workflows/_push-entrypoint.yaml

@@ -36,7 +36,7 @@ jobs:
       ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
       BUILDER: ${{ steps.env.outputs.BUILDER }}
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
       - name: Set up environment
@@ -61,7 +61,7 @@ jobs:
       ct-docker: ${{ steps.matrix.outputs.ct-docker }}
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
           fetch-depth: 0
@@ -146,7 +146,7 @@ jobs:
           - emqx-enterprise
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
           fetch-depth: 0
@@ -163,12 +163,12 @@ jobs:
           echo "export PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a env.sh
           zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
           make ${PROFILE}-rel
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: ${{ matrix.profile }}
           path: ${{ matrix.profile }}.zip
           retention-days: 7
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: "${{ matrix.profile }}-schema-dump"
           path: |

+ 4 - 4
.github/workflows/build_and_push_docker_images.yaml

@@ -70,7 +70,7 @@ jobs:
           - arm64
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
       - name: build release tarball
@@ -82,7 +82,7 @@ jobs:
           ./scripts/buildx.sh --profile ${{ matrix.profile }} --pkgtype tgz --builder "$EMQX_DOCKER_BUILD_FROM"
           PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_DOCKER_BUILD_FROM" ./pkg-vsn.sh "${{ matrix.profile }}")
           echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_OUTPUT"
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: "${{ matrix.profile }}-${{ matrix.arch == 'x64' && 'amd64' || 'arm64' }}.tar.gz"
           path: "_packages/emqx*/emqx-*.tar.gz"
@@ -114,7 +114,7 @@ jobs:
       EMQX_SOURCE_TYPE: tgz
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.ref }}
 
@@ -201,7 +201,7 @@ jobs:
         run: |
           docker save "${_EMQX_DOCKER_IMAGE_TAG}" | gzip > $PROFILE-docker-$PKG_VSN.tar.gz
 
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: "${{ env.PROFILE }}-docker"
           path: "${{ env.PROFILE }}-docker-${{ env.PKG_VSN }}.tar.gz"

+ 4 - 4
.github/workflows/build_packages.yaml

@@ -82,7 +82,7 @@ jobs:
           - ${{ inputs.otp_vsn }}
     runs-on: ${{ matrix.os }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         ref: ${{ github.event.inputs.ref }}
         fetch-depth: 0
@@ -95,7 +95,7 @@ jobs:
         apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
         apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
         apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: success()
       with:
         name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.otp }}
@@ -146,7 +146,7 @@ jobs:
         shell: bash
 
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         ref: ${{ github.event.inputs.ref }}
         fetch-depth: 0
@@ -181,7 +181,7 @@ jobs:
           --builder $BUILDER \
           --elixir $IS_ELIXIR \
           --pkgtype pkg
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       with:
         name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.arch }}${{ matrix.with_elixir == 'yes' && '-elixir' || '' }}-${{ matrix.builder }}-${{ matrix.otp }}-${{ matrix.elixir }}
         path: _packages/${{ matrix.profile }}/

+ 6 - 6
.github/workflows/build_packages_cron.yaml

@@ -37,7 +37,7 @@ jobs:
         shell: bash
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ matrix.profile[1] }}
           fetch-depth: 0
@@ -53,14 +53,14 @@ jobs:
       - name: build pkg
         run: |
           ./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER"
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: success()
         with:
           name: ${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.os }}
           path: _packages/${{ matrix.profile[0] }}/
           retention-days: 7
       - name: Send notification to Slack
-        uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
+        uses: slackapi/slack-github-action@37ebaef184d7626c5f204ab8d3baff4262dd30f0 # v1.27.0
         if: failure()
         env:
           SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -83,7 +83,7 @@ jobs:
           - macos-14-arm64
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ matrix.branch }}
           fetch-depth: 0
@@ -101,14 +101,14 @@ jobs:
           apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
           apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
           apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: success()
         with:
           name: ${{ matrix.profile }}-${{ matrix.os }}
           path: _packages/${{ matrix.profile }}/
           retention-days: 7
       - name: Send notification to Slack
-        uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
+        uses: slackapi/slack-github-action@37ebaef184d7626c5f204ab8d3baff4262dd30f0 # v1.27.0
         if: failure()
         env:
           SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

+ 4 - 4
.github/workflows/build_slim_packages.yaml

@@ -32,7 +32,7 @@ jobs:
           - ["emqx-enterprise", "erlang", "x64"]
 
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         fetch-depth: 0
     - name: build tgz
@@ -41,7 +41,7 @@ jobs:
     - name: build pkg
       run: |
         ./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       with:
         name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
         path: _packages/${{ matrix.profile[0] }}/*
@@ -62,7 +62,7 @@ jobs:
       EMQX_NAME: ${{ matrix.profile }}
 
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - name: Set up environment
       id: env
       run: |
@@ -77,7 +77,7 @@ jobs:
         apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
         apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
         apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       with:
         name: ${{ matrix.os }}
         path: _packages/**/*

+ 1 - 1
.github/workflows/bump-dashboard-version.yaml

@@ -33,7 +33,7 @@ jobs:
       pull-requests: write
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
 
       - name: Create PR to update dashboard version in Makefile
         env:

+ 2 - 2
.github/workflows/check_deps_integrity.yaml

@@ -22,7 +22,7 @@ jobs:
         profile:
           - emqx-enterprise
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
       - run: make ensure-rebar3
       - run: ./scripts/check-deps-integrity.escript
@@ -37,7 +37,7 @@ jobs:
       - run: ./scripts/check-elixir-deps-discrepancies.exs
       - run: ./scripts/check-elixir-applications.exs
       - name: Upload produced lock files
-        uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+        uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: failure()
         with:
           name: ${{ matrix.profile }}_produced_lock_files

+ 2 - 2
.github/workflows/codeql.yaml

@@ -25,7 +25,7 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ matrix.branch }}
 
@@ -58,7 +58,7 @@ jobs:
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         ref: ${{ matrix.branch }}
 

+ 1 - 1
.github/workflows/green_master.yaml

@@ -25,7 +25,7 @@ jobs:
           - master
           - release-58
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ matrix.ref }}
 

+ 3 - 3
.github/workflows/performance_test.yaml

@@ -44,12 +44,12 @@ jobs:
         aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
         aws-region: ${{ secrets.AWS_DEFAULT_REGION_PERF_TEST }}
     - name: Checkout tf-emqx-performance-test
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         repository: emqx/tf-emqx-performance-test
         ref: v0.3.2
     - name: Setup Terraform
-      uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1
+      uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
       with:
         terraform_version: 1.6.4
         terraform_wrapper: false
@@ -218,7 +218,7 @@ jobs:
         slack-message: "EMQX performance test ${{ matrix.scenario }} failed. <${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}|Workflow Run>"
         payload-file-path: slack-payload.json
         payload-file-path-parsed: false
-    
+
     - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
       if: failure()
       with:

+ 2 - 2
.github/workflows/release.yaml

@@ -36,7 +36,7 @@ jobs:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.tag }}
       - name: Detect profile
@@ -133,7 +133,7 @@ jobs:
       checks: write
       actions: write
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       - name: trigger re-run of app versions check on open PRs
         shell: bash
         env:

+ 1 - 1
.github/workflows/run_conf_tests.yaml

@@ -41,7 +41,7 @@ jobs:
         if: failure()
         run: |
           cat _build/${{ matrix.profile }}/rel/emqx/log/erlang.log.*
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: failure()
         with:
           name: conftest-logs-${{ matrix.profile }}

+ 2 - 2
.github/workflows/run_docker_tests.yaml

@@ -28,7 +28,7 @@ jobs:
       EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
         with:
           name: ${{ env.EMQX_NAME }}-docker
@@ -72,7 +72,7 @@ jobs:
           - emqx-enterprise
           - emqx-elixir
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
         with:
           name: ${{ env.EMQX_NAME }}-docker

+ 3 - 3
.github/workflows/run_emqx_app_tests.yaml

@@ -37,7 +37,7 @@ jobs:
       matrix: ${{ steps.matrix.outputs.matrix }}
       skip: ${{ steps.matrix.outputs.skip }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         fetch-depth: 0
     - name: prepare test matrix
@@ -72,7 +72,7 @@ jobs:
       run:
         shell: bash
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         fetch-depth: 0
     - name: run
@@ -95,7 +95,7 @@ jobs:
             echo "Suites: $SUITES"
             ./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true --suite="$SUITES"
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: failure()
       with:
         name: logs-emqx-app-tests-${{ matrix.type }}

+ 2 - 2
.github/workflows/run_helm_tests.yaml

@@ -34,7 +34,7 @@ jobs:
         - ssl1.3
         - ssl1.2
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         path: source
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -158,7 +158,7 @@ jobs:
           fi
           sleep 1;
         done
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         repository: emqx/paho.mqtt.testing
         ref: develop-5.0

+ 11 - 11
.github/workflows/run_jmeter_tests.yaml

@@ -31,7 +31,7 @@ jobs:
         else
           wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       with:
         name: apache-jmeter.tgz
         path: /tmp/apache-jmeter.tgz
@@ -51,7 +51,7 @@ jobs:
 
     needs: jmeter_artifact
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: emqx-docker
@@ -88,7 +88,7 @@ jobs:
           echo "check logs failed"
           exit 1
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: always()
       with:
         name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }}
@@ -113,7 +113,7 @@ jobs:
 
     needs: jmeter_artifact
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: emqx-docker
@@ -161,7 +161,7 @@ jobs:
       if: failure()
       run: |
         docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: always()
       with:
         name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }}
@@ -183,7 +183,7 @@ jobs:
 
     needs: jmeter_artifact
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: emqx-docker
@@ -227,7 +227,7 @@ jobs:
           echo "check logs failed"
           exit 1
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: always()
       with:
         name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}
@@ -245,7 +245,7 @@ jobs:
 
     needs: jmeter_artifact
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: emqx-docker
@@ -285,7 +285,7 @@ jobs:
           echo "check logs failed"
           exit 1
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: always()
       with:
         name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }}
@@ -304,7 +304,7 @@ jobs:
 
     needs: jmeter_artifact
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
     - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
       with:
         name: emqx-docker
@@ -335,7 +335,7 @@ jobs:
           echo "check logs failed"
           exit 1
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       if: always()
       with:
         name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }}

+ 4 - 4
.github/workflows/run_relup_tests.yaml

@@ -45,7 +45,7 @@ jobs:
       run: |
         export PROFILE='emqx-enterprise'
         make emqx-enterprise-tgz
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       name: Upload built emqx and test scenario
       with:
         name: relup_tests_emqx_built
@@ -72,10 +72,10 @@ jobs:
       run:
         shell: bash
     steps:
-    - uses: erlef/setup-beam@b9c58b0450cd832ccdb3c17cc156a47065d2114f # v1.18.1
+    - uses: erlef/setup-beam@5304e04ea2b355f03681464e683d92e3b2f18451 # v1.18.2
       with:
         otp-version: 26.2.5
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+    - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
       with:
         repository: hawk/lux
         ref: lux-2.8.1
@@ -111,7 +111,7 @@ jobs:
           docker logs node2.emqx.io | tee lux_logs/emqx2.log
           exit 1
         fi
-    - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+    - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
       name: Save debug data
       if: failure()
       with:

+ 2 - 2
.github/workflows/run_test_cases.yaml

@@ -130,7 +130,7 @@ jobs:
         if: failure()
         run: tar -czf logs.tar.gz _build/test/logs
 
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: failure()
         with:
           name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}
@@ -187,7 +187,7 @@ jobs:
         if: failure()
         run: tar -czf logs.tar.gz _build/test/logs
 
-      - uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         if: failure()
         with:
           name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}

+ 2 - 2
.github/workflows/scorecard.yaml

@@ -25,7 +25,7 @@ jobs:
 
     steps:
       - name: "Checkout code"
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           persist-credentials: false
 
@@ -40,7 +40,7 @@ jobs:
           publish_results: true
 
       - name: "Upload artifact"
-        uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
+        uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
           name: SARIF file
           path: results.sarif

+ 1 - 1
.github/workflows/sync-release-branch.yaml

@@ -34,7 +34,7 @@ jobs:
       pull-requests: write
 
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           fetch-depth: 0
 

+ 1 - 1
.github/workflows/upload-helm-charts.yaml

@@ -23,7 +23,7 @@ jobs:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
         with:
           ref: ${{ github.event.inputs.tag }}
       - name: Detect profile

+ 2 - 2
README-CN.md

@@ -6,10 +6,10 @@
 [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
 [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
-[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
+[![OpenSSF Scorecard](https://img.shields.io/ossf-scorecard/github.com/emqx/emqx?label=OpenSSF%20Scorecard&style=flat)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
 [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
-[![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
+[![X](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=x)](https://x.com/EMQTech)
 [![Community](https://img.shields.io/badge/Community-EMQX-yellow)](https://askemq.com)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ%20中文-FF0000?logo=youtube)](https://www.youtube.com/channel/UCir_r04HIsLjf2qqyZ4A8Cg)
 

+ 3 - 3
README-RU.md

@@ -6,10 +6,10 @@
 [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
 [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
-[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
+[![OpenSSF Scorecard](https://img.shields.io/ossf-scorecard/github.com/emqx/emqx?label=OpenSSF%20Scorecard&style=flat)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
 [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
-[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
+[![X](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=x)](https://x.com/EMQTech)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
 
 
@@ -75,7 +75,7 @@ emqx start
 
 ## Дополнительные ресурсы
 
-- [MQTT client programming](https://www.emqx.com/en/blog/tag/mqtt-client-programming)
+- [MQTT client programming](https://www.emqx.com/en/blog/category/mqtt-programming)
 
   Коллекция блогов, чтобы помочь разработчикам быстро начать работу с MQTT на PHP, Node.js, Python, Golang, и других языках программирования.
 

+ 3 - 3
README.md

@@ -6,10 +6,10 @@ English | [简体中文](./README-CN.md) | [Русский](./README-RU.md)
 [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
 [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
-[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
+[![OpenSSF Scorecard](https://img.shields.io/ossf-scorecard/github.com/emqx/emqx?label=OpenSSF%20Scorecard&style=flat)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
 [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
-[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
+[![X](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=x)](https://x.com/EMQTech)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
 
 
@@ -78,7 +78,7 @@ For more organised improvement proposals, you can send pull requests to [EIP](ht
 
 ## Resources
 
-- [MQTT client programming](https://www.emqx.com/en/blog/tag/mqtt-client-programming)
+- [MQTT client programming](https://www.emqx.com/en/blog/category/mqtt-programming)
 
   A series of blogs to help developers get started quickly with MQTT in PHP, Node.js, Python, Golang, and other programming languages.
 

+ 1 - 0
apps/emqx/include/emqx_placeholder.hrl

@@ -40,6 +40,7 @@
 -define(VAR_TOPIC, "topic").
 -define(VAR_ENDPOINT_NAME, "endpoint_name").
 -define(VAR_NS_CLIENT_ATTRS, {var_namespace, "client_attrs"}).
+-define(VAR_ZONE, "zone").
 
 -define(PH_PASSWORD, ?PH(?VAR_PASSWORD)).
 -define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).

+ 34 - 3
apps/emqx/src/emqx_channel.erl

@@ -276,7 +276,8 @@ init(
         },
         Zone
     ),
-    {NClientInfo, NConnInfo} = take_conn_info_fields([ws_cookie, peersni], ClientInfo, ConnInfo),
+    {NClientInfo, NConnInfo0} = take_conn_info_fields([ws_cookie, peersni], ClientInfo, ConnInfo),
+    NConnInfo = maybe_quic_shared_state(NConnInfo0, Opts),
     #channel{
         conninfo = NConnInfo,
         clientinfo = NClientInfo,
@@ -295,6 +296,11 @@ init(
         pendings = []
     }.
 
+maybe_quic_shared_state(ConnInfo, #{conn_shared_state := QSS}) ->
+    ConnInfo#{conn_shared_state => QSS};
+maybe_quic_shared_state(ConnInfo, _) ->
+    ConnInfo.
+
 set_peercert_infos(NoSSL, ClientInfo, _) when
     NoSSL =:= nossl;
     NoSSL =:= undefined
@@ -2374,17 +2380,42 @@ init_alias_maximum(_ConnPkt, _ClientInfo) ->
 
 %% MQTT 5
 ensure_keepalive(#{'Server-Keep-Alive' := Interval}, Channel = #channel{conninfo = ConnInfo}) ->
+    ensure_quic_conn_idle_timeout(Interval, Channel),
     ensure_keepalive_timer(Interval, Channel#channel{conninfo = ConnInfo#{keepalive => Interval}});
 %% MQTT 3,4
 ensure_keepalive(_AckProps, Channel = #channel{conninfo = ConnInfo}) ->
+    ensure_quic_conn_idle_timeout(maps:get(keepalive, ConnInfo), Channel),
     ensure_keepalive_timer(maps:get(keepalive, ConnInfo), Channel).
 
+ensure_quic_conn_idle_timeout(Timeout, #channel{
+    clientinfo = #{zone := Zone},
+    conninfo = #{socktype := quic, sock := Sock}
+}) ->
+    Conn = element(2, Sock),
+    #{keepalive_multiplier := Mul} =
+        emqx_config:get_zone_conf(Zone, [mqtt]),
+    %%% The original idle_timeout is from the listener, now we update it per connection
+    %%% Conn could be closed so we don't check the ret val
+    _ = quicer:setopt(Conn, settings, #{idle_timeout_ms => timer:seconds(Timeout * Mul)}, false),
+    ok;
+ensure_quic_conn_idle_timeout(_, _) ->
+    ok.
+
 ensure_keepalive_timer(0, Channel) ->
     Channel;
 ensure_keepalive_timer(disabled, Channel) ->
     Channel;
-ensure_keepalive_timer(Interval, Channel = #channel{clientinfo = #{zone := Zone}}) ->
-    Keepalive = emqx_keepalive:init(Zone, Interval),
+ensure_keepalive_timer(
+    Interval, Channel = #channel{clientinfo = #{zone := Zone}, conninfo = ConnInfo}
+) ->
+    Val =
+        case maps:get(conn_shared_state, ConnInfo, undefined) of
+            #{cnts_ref := CntRef} ->
+                _MFA = {emqx_quic_connection, read_cnt, [CntRef, control_packet]};
+            undefined ->
+                emqx_pd:get_counter(recv_pkt)
+        end,
+    Keepalive = emqx_keepalive:init(Zone, Val, Interval),
     ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}).
 
 clear_keepalive(Channel = #channel{timers = Timers}) ->

+ 17 - 8
apps/emqx/src/emqx_connection.erl

@@ -120,8 +120,8 @@
     %% limiter timers
     limiter_timer :: undefined | reference(),
 
-    %% QUIC conn owner pid if in use.
-    quic_conn_pid :: option(pid())
+    %% QUIC conn shared state
+    quic_conn_ss :: option(map())
 }).
 
 -record(retry, {
@@ -317,7 +317,8 @@ init_state(
         sockname => Sockname,
         peercert => Peercert,
         peersni => PeerSNI,
-        conn_mod => ?MODULE
+        conn_mod => ?MODULE,
+        sock => Socket
     },
 
     LimiterTypes = [?LIMITER_BYTES_IN, ?LIMITER_MESSAGE_IN],
@@ -365,7 +366,7 @@ init_state(
         limiter_buffer = queue:new(),
         limiter_timer = undefined,
         %% for quic streams to inherit
-        quic_conn_pid = maps:get(conn_pid, Opts, undefined)
+        quic_conn_ss = maps:get(conn_shared_state, Opts, undefined)
     }.
 
 run_loop(
@@ -595,11 +596,13 @@ handle_msg(
         channel = Channel,
         serialize = Serialize,
         parse_state = PS,
-        quic_conn_pid = QuicConnPid
+        quic_conn_ss = QSS
     }
 ) ->
-    QuicConnPid =/= undefined andalso
-        emqx_quic_connection:activate_data_streams(QuicConnPid, {PS, Serialize, Channel}),
+    QSS =/= undefined andalso
+        emqx_quic_connection:activate_data_streams(
+            maps:get(conn_pid, QSS), {PS, Serialize, Channel}
+        ),
     ClientId = emqx_channel:info(clientid, Channel),
     emqx_cm:insert_channel_info(ClientId, info(State), stats(State));
 handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
@@ -799,7 +802,13 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
 %%--------------------------------------------------------------------
 %% Handle incoming packet
 
-handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
+handle_incoming(Packet, #state{quic_conn_ss = QSS} = State) when is_record(Packet, mqtt_packet) ->
+    QSS =/= undefined andalso
+        emqx_quic_connection:step_cnt(
+            maps:get(cnts_ref, QSS),
+            control_packet,
+            1
+        ),
     ok = inc_incoming_stats(Packet),
     with_channel(handle_in, [Packet], State);
 handle_incoming(FrameError, State) ->

+ 1 - 1
apps/emqx/src/emqx_crl_cache.erl

@@ -74,7 +74,7 @@
     %% for future use
     extra = #{} :: map()
 }).
--type url() :: uri_string:uri_string().
+-type url() :: string().
 -type state() :: #state{}.
 
 %%--------------------------------------------------------------------

+ 18 - 3
apps/emqx/src/emqx_keepalive.erl

@@ -16,6 +16,8 @@
 
 -module(emqx_keepalive).
 
+-include("types.hrl").
+
 -export([
     init/1,
     init/2,
@@ -35,6 +37,8 @@
     check_interval :: pos_integer(),
     %% the received packets since last keepalive check
     statval :: non_neg_integer(),
+    %% stat reader func
+    stat_reader :: mfargs() | undefined,
     %% The number of idle intervals allowed before disconnecting the client.
     idle_milliseconds = 0 :: non_neg_integer(),
     max_idle_millisecond :: pos_integer()
@@ -65,18 +69,26 @@ init(Zone, Interval) ->
 %% @doc Init keepalive.
 -spec init(
     Zone :: atom(),
-    StatVal :: non_neg_integer(),
+    StatVal :: non_neg_integer() | Reader :: mfa(),
     Second :: non_neg_integer()
 ) -> keepalive() | undefined.
-init(Zone, StatVal, Second) when Second > 0 andalso Second =< ?MAX_INTERVAL ->
+init(Zone, Stat, Second) when Second > 0 andalso Second =< ?MAX_INTERVAL ->
     #{keepalive_multiplier := Mul, keepalive_check_interval := CheckInterval} =
         emqx_config:get_zone_conf(Zone, [mqtt]),
     MilliSeconds = timer:seconds(Second),
     Interval = emqx_utils:clamp(CheckInterval, 1000, max(MilliSeconds div 2, 1000)),
     MaxIdleMs = ceil(MilliSeconds * Mul),
+    {StatVal, ReaderMFA} =
+        case Stat of
+            {M, F, A} = MFA ->
+                {erlang:apply(M, F, A), MFA};
+            Stat when is_integer(Stat) ->
+                {Stat, undefined}
+        end,
     #keepalive{
         check_interval = Interval,
         statval = StatVal,
+        stat_reader = ReaderMFA,
         idle_milliseconds = 0,
         max_idle_millisecond = MaxIdleMs
     };
@@ -110,9 +122,12 @@ info(idle_milliseconds, #keepalive{idle_milliseconds = Val}) ->
 info(check_interval, undefined) ->
     0.
 
-check(Keepalive = #keepalive{}) ->
+check(Keepalive = #keepalive{stat_reader = undefined}) ->
     RecvCnt = emqx_pd:get_counter(recv_pkt),
     check(RecvCnt, Keepalive);
+check(Keepalive = #keepalive{stat_reader = {M, F, A}}) ->
+    RecvCnt = erlang:apply(M, F, A),
+    check(RecvCnt, Keepalive);
 check(Keepalive) ->
     {ok, Keepalive}.
 

+ 1 - 0
apps/emqx/src/emqx_mountpoint.erl

@@ -38,6 +38,7 @@
     ?VAR_CLIENTID,
     ?VAR_USERNAME,
     ?VAR_ENDPOINT_NAME,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 74 - 27
apps/emqx/src/emqx_packet.erl

@@ -54,7 +54,7 @@
     format/2
 ]).
 
--export([format_truncated_payload/3]).
+-export([format_payload/2]).
 
 -define(TYPE_NAMES,
     {'CONNECT', 'CONNACK', 'PUBLISH', 'PUBACK', 'PUBREC', 'PUBREL', 'PUBCOMP', 'SUBSCRIBE',
@@ -506,7 +506,7 @@ format_variable(undefined, _, _) ->
 format_variable(Variable, undefined, PayloadEncode) ->
     format_variable(Variable, PayloadEncode);
 format_variable(Variable, Payload, PayloadEncode) ->
-    [format_variable(Variable, PayloadEncode), ", ", format_payload(Payload, PayloadEncode)].
+    [format_variable(Variable, PayloadEncode), ", ", format_payload_label(Payload, PayloadEncode)].
 
 format_variable(
     #mqtt_packet_connect{
@@ -537,7 +537,7 @@ format_variable(
                     ", Will(Q~p, R~p, Topic=~ts ",
                     [WillQoS, i(WillRetain), WillTopic]
                 ),
-                format_payload(WillPayload, PayloadEncode),
+                format_payload_label(WillPayload, PayloadEncode),
                 ")"
             ];
         false ->
@@ -617,32 +617,79 @@ format_password(undefined) -> "";
 format_password(<<>>) -> "";
 format_password(_Password) -> "******".
 
+format_payload_label(Payload, Type) ->
+    ["Payload=", format_payload(Payload, Type)].
+
 format_payload(_, hidden) ->
-    "Payload=******";
-format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
-    ["Payload=", unicode:characters_to_list(Payload)];
-format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
-    ["Payload(hex)=", binary:encode_hex(Payload)];
-format_payload(<<Part:?TRUNCATED_PAYLOAD_SIZE/binary, _/binary>> = Payload, Type) ->
-    [
-        "Payload=",
-        format_truncated_payload(Part, byte_size(Payload), Type)
-    ].
+    "******";
+format_payload(<<>>, _) ->
+    "";
+format_payload(Payload, Type) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
+    %% under the 1KB limit
+    format_payload_limit(Type, Payload, size(Payload));
+format_payload(Payload, Type) ->
+    %% too long, truncate to 100B
+    format_payload_limit(Type, Payload, ?TRUNCATED_PAYLOAD_SIZE).
+
+format_payload_limit(Type0, Payload, Limit) when size(Payload) > Limit ->
+    {Type, Part, TruncatedBytes} = truncate_payload(Type0, Limit, Payload),
+    case TruncatedBytes > 0 of
+        true ->
+            [do_format_payload(Type, Part), "...(", integer_to_list(TruncatedBytes), " bytes)"];
+        false ->
+            do_format_payload(Type, Payload)
+    end;
+format_payload_limit(text, Payload, _Limit) ->
+    case is_utf8(Payload) of
+        true ->
+            do_format_payload(text, Payload);
+        false ->
+            do_format_payload(hex, Payload)
+    end;
+format_payload_limit(hex, Payload, _Limit) ->
+    do_format_payload(hex, Payload).
+
+do_format_payload(text, Bytes) ->
+    %% utf8 ensured
+    Bytes;
+do_format_payload(hex, Bytes) ->
+    ["hex:", binary:encode_hex(Bytes)].
+
+is_utf8(Bytes) ->
+    case trim_utf8(size(Bytes), Bytes) of
+        {ok, 0} ->
+            true;
+        _ ->
+            false
+    end.
 
-format_truncated_payload(Bin, Size, Type) ->
-    Bin2 =
-        case Type of
-            text -> Bin;
-            hex -> binary:encode_hex(Bin)
-        end,
-    unicode:characters_to_list(
-        [
-            Bin2,
-            "... The ",
-            integer_to_list(Size - ?TRUNCATED_PAYLOAD_SIZE),
-            " bytes of this log are truncated"
-        ]
-    ).
+truncate_payload(hex, Limit, Payload) ->
+    <<Part:Limit/binary, Rest/binary>> = Payload,
+    {hex, Part, size(Rest)};
+truncate_payload(text, Limit, Payload) ->
+    case find_complete_utf8_len(Limit, Payload) of
+        {ok, Len} ->
+            <<Part:Len/binary, Rest/binary>> = Payload,
+            {text, Part, size(Rest)};
+        error ->
+            <<Part:Limit/binary, Rest/binary>> = Payload,
+            {hex, Part, size(Rest)}
+    end.
+
+find_complete_utf8_len(Limit, Payload) ->
+    case trim_utf8(Limit, Payload) of
+        {ok, TailLen} ->
+            {ok, size(Payload) - TailLen};
+        error ->
+            error
+    end.
+
+trim_utf8(Count, <<_/utf8, Rest/binary>> = All) when Count > 0 ->
+    trim_utf8(Count - (size(All) - size(Rest)), Rest);
+trim_utf8(Count, Bytes) when Count =< 0 ->
+    {ok, size(Bytes)};
+trim_utf8(_Count, _Rest) ->
+    error.
 
 i(true) -> 1;
 i(false) -> 0;

+ 1 - 1
apps/emqx/src/emqx_persistent_session_ds/emqx_persistent_session_ds_shared_subs.erl

@@ -402,7 +402,7 @@ revoke_stream(ShareTopicFilter, Stream, S0, SchedS0) ->
             %% This should not happen.
             %% Agent should have received unsubscribe callback
             %% and should not have revoked this stream
-            S0;
+            {S0, SchedS0};
         #{id := SubId} ->
             emqx_persistent_session_ds_stream_scheduler:on_unsubscribe(SubId, Stream, S0, SchedS0)
     end.

+ 31 - 6
apps/emqx/src/emqx_quic_connection.erl

@@ -47,6 +47,12 @@
     handle_info/2
 ]).
 
+%% Connection scope shared counter
+-export([step_cnt/3]).
+-export([read_cnt/2]).
+
+-define(MAX_CNTS, 8).
+
 -export_type([cb_state/0, cb_ret/0]).
 
 -type cb_state() :: #{
@@ -62,6 +68,8 @@
     streams := [{pid(), quicer:stream_handle()}],
     %% New stream opts
     stream_opts := map(),
+    %% Connection Scope Counters, shared by streams for MQTT layer
+    cnts_ref := counters:counters_ref(),
     %% If connection is resumed from session ticket
     is_resumed => boolean(),
     %% mqtt message serializer config
@@ -102,7 +110,7 @@ new_conn(
     #{zone := Zone, conn := undefined, ctrl_pid := undefined} = S
 ) ->
     process_flag(trap_exit, true),
-    ?SLOG(debug, ConnInfo),
+    ?SLOG(debug, ConnInfo#{conn => Conn}),
     case emqx_olp:is_overloaded() andalso is_zone_olp_enabled(Zone) of
         false ->
             %% Start control stream process
@@ -160,7 +168,8 @@ new_stream(
         limiter := Limiter,
         parse_state := PS,
         channel := Channel,
-        serialize := Serialize
+        serialize := Serialize,
+        conn_shared_state := SS
     } = S
 ) ->
     %% Cherry pick options for data streams
@@ -172,7 +181,8 @@ new_stream(
         parse_state => PS,
         channel => Channel,
         serialize => Serialize,
-        quic_event_mask => ?QUICER_STREAM_EVENT_MASK_START_COMPLETE
+        quic_event_mask => ?QUICER_STREAM_EVENT_MASK_START_COMPLETE,
+        conn_shared_state => SS
     },
     {ok, NewStreamOwner} = quicer_stream:start_link(
         emqx_quic_data_stream,
@@ -235,7 +245,7 @@ streams_available(_C, {BidirCnt, UnidirCnt}, S) ->
     cb_ret().
 peer_needs_streams(_C, _StreamType, S) ->
     ?SLOG(info, #{
-        msg => "ignore_peer_needs_more_streams", info => maps:with([conn_pid, ctrl_pid], S)
+        msg => "ignore_peer_needs_more_streams", info => maps:with([conn_shared_state, ctrl_pid], S)
     }),
     {ok, S}.
 
@@ -288,6 +298,17 @@ handle_info({'EXIT', Pid, Reason}, #{streams := Streams} = S) ->
             {stop, unknown_pid_down, S}
     end.
 
+-spec step_cnt(counters:counters_ref(), control_packet, integer()) -> ok.
+step_cnt(CounterRef, Name, Incr) when is_atom(Name) ->
+    counters:add(CounterRef, cnt_id(Name), Incr).
+
+-spec read_cnt(counters:counters_ref(), control_packet) -> integer().
+read_cnt(CounterRef, Name) ->
+    counters:get(CounterRef, cnt_id(Name)).
+
+cnt_id(control_packet) ->
+    1.
+
 %%%
 %%%  Internals
 %%%
@@ -302,15 +323,19 @@ is_zone_olp_enabled(Zone) ->
 
 -spec init_cb_state(map()) -> cb_state().
 init_cb_state(#{zone := _Zone} = Map) ->
+    SS = #{
+        cnts_ref => counters:new(?MAX_CNTS, [write_concurrency]),
+        conn_pid => self()
+    },
     Map#{
-        conn_pid => self(),
         ctrl_pid => undefined,
         conn => undefined,
         streams => [],
         parse_state => undefined,
         channel => undefined,
         serialize => undefined,
-        is_resumed => false
+        is_resumed => false,
+        conn_shared_state => SS
     }.
 
 %% BUILD_WITHOUT_QUIC

+ 18 - 8
apps/emqx/src/emqx_quic_data_stream.erl

@@ -82,11 +82,11 @@ activate_data(StreamPid, {PS, Serialize, Channel}) ->
     {ok, cb_state()}.
 init_handoff(
     Stream,
-    _StreamOpts,
+    #{conn_shared_state := ConnSharedState} = _StreamOpts,
     Connection,
     #{is_orphan := true, flags := Flags}
 ) ->
-    {ok, init_state(Stream, Connection, Flags)}.
+    {ok, init_state(Stream, Connection, Flags, ConnSharedState)}.
 
 %%
 %% @doc Post handoff data stream
@@ -215,10 +215,17 @@ do_handle_appl_msg(
         {error, E} ->
             {stop, E, S}
     end;
-do_handle_appl_msg({incoming, #mqtt_packet{} = Packet}, #{channel := Channel} = S) when
+do_handle_appl_msg(
+    {incoming, #mqtt_packet{} = Packet},
+    #{
+        channel := Channel,
+        conn_shared_state := #{cnts_ref := SharedCntsRef}
+    } = S
+) when
     Channel =/= undefined
 ->
     ok = inc_incoming_stats(Packet),
+    _ = emqx_quic_connection:step_cnt(SharedCntsRef, control_packet, 1),
     with_channel(handle_in, [Packet], S);
 do_handle_appl_msg({incoming, {frame_error, _} = FE}, #{channel := Channel} = S) when
     Channel =/= undefined
@@ -321,14 +328,15 @@ serialize_packet(Packet, Serialize) ->
 -spec init_state(
     quicer:stream_handle(),
     quicer:connection_handle(),
-    non_neg_integer()
+    non_neg_integer(),
+    map()
 ) ->
     % @TODO
     map().
-init_state(Stream, Connection, OpenFlags) ->
-    init_state(Stream, Connection, OpenFlags, undefined).
+init_state(Stream, Connection, OpenFlags, ConnSharedState) ->
+    init_state(Stream, Connection, OpenFlags, ConnSharedState, undefined).
 
-init_state(Stream, Connection, OpenFlags, PS) ->
+init_state(Stream, Connection, OpenFlags, ConnSharedState, PS) ->
     %% quic stream handle
     #{
         stream => Stream,
@@ -350,7 +358,9 @@ init_state(Stream, Connection, OpenFlags, PS) ->
         %% serialize opts for connection
         serialize => undefined,
         %% Current working queue
-        task_queue => queue:new()
+        task_queue => queue:new(),
+        %% Connection Shared State
+        conn_shared_state => ConnSharedState
     }.
 
 -spec do_handle_call(term(), cb_state()) -> cb_ret().

+ 30 - 7
apps/emqx/src/emqx_ssl_crl_cache.erl

@@ -18,7 +18,7 @@
 %% %CopyrightEnd%
 
 %%--------------------------------------------------------------------
-%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
 %%
 %% Licensed under the Apache License, Version 2.0 (the "License");
 %% you may not use this file except in compliance with the License.
@@ -39,6 +39,25 @@
 
 %%----------------------------------------------------------------------
 %% Purpose: Simple default CRL cache
+%%
+%% The cache is a part of an opaque term named DB created by `ssl_manager'
+%% from calling `ssl_pkix_db:create/1'.
+%%
+%% Insert and delete operations are abstracted by `ssl_manager'.
+%% Read operation is done by passing-through the DB term to
+%% `ssl_pkix_db:lookup/2'.
+%%
+%% The CRL cache in the DB term is essentially an ETS table.
+%% The table is created as `ssl_otp_crl_cache', but not
+%% a named table. You can find the table reference from `ets:i()'.
+%%
+%% The cache key in the original OTP implementation was the path part of the
+%% CRL distribution point URL. e.g. if the URL is `http://foo.bar.com/crl.pem'
+%% the cache key would be `"crl.pem"'.
+%% There is however no type spec for the APIs, nor there is any check
+%% on the format, making it possible to use the full URL binary
+%% string as key instead --- which can avoid cache key clash when
+%% different DPs share the same path.
 %%----------------------------------------------------------------------
 
 -module(emqx_ssl_crl_cache).
@@ -142,8 +161,9 @@ delete({der, CRLs}) ->
     ssl_manager:delete_crls({?NO_DIST_POINT, CRLs});
 delete(URI) ->
     case uri_string:normalize(URI, [return_map]) of
-        #{scheme := "http", path := Path} ->
-            ssl_manager:delete_crls(string:trim(Path, leading, "/"));
+        #{scheme := "http", path := _} ->
+            Key = cache_key(URI),
+            ssl_manager:delete_crls(Key);
         _ ->
             {error, {only_http_distribution_points_supported, URI}}
     end.
@@ -153,8 +173,9 @@ delete(URI) ->
 %%--------------------------------------------------------------------
 do_insert(URI, CRLs) ->
     case uri_string:normalize(URI, [return_map]) of
-        #{scheme := "http", path := Path} ->
-            ssl_manager:insert_crls(string:trim(Path, leading, "/"), CRLs);
+        #{scheme := "http", path := _} ->
+            Key = cache_key(URI),
+            ssl_manager:insert_crls(Key, CRLs);
         _ ->
             {error, {only_http_distribution_points_supported, URI}}
     end.
@@ -218,8 +239,7 @@ http_get(URL, Rest, CRLDbInfo, Timeout) ->
 cache_lookup(_, undefined) ->
     [];
 cache_lookup(URL, {{Cache, _}, _}) ->
-    #{path := Path} = uri_string:normalize(URL, [return_map]),
-    case ssl_pkix_db:lookup(string:trim(Path, leading, "/"), Cache) of
+    case ssl_pkix_db:lookup(cache_key(URL), Cache) of
         undefined ->
             [];
         [CRLs] ->
@@ -235,3 +255,6 @@ handle_http(URI, Rest, {_, [{http, Timeout}]} = CRLDbInfo) ->
     CRLs;
 handle_http(_, Rest, CRLDbInfo) ->
     get_crls(Rest, CRLDbInfo).
+
+cache_key(URL) ->
+    iolist_to_binary(URL).

+ 2 - 9
apps/emqx/src/emqx_trace/emqx_trace_formatter.erl

@@ -98,16 +98,9 @@ format_packet(Packet, Encode) ->
 
 format_payload(undefined, _) ->
     "";
-format_payload(_, hidden) ->
-    "******";
-format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
-    unicode:characters_to_list(Payload);
-format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> binary:encode_hex(Payload);
-format_payload(<<Part:?TRUNCATED_PAYLOAD_SIZE/binary, _/binary>> = Payload, Type) ->
-    emqx_packet:format_truncated_payload(Part, byte_size(Payload), Type);
+format_payload(Payload, Type) when is_binary(Payload) ->
+    emqx_packet:format_payload(Payload, Type);
 format_payload(Payload, _) ->
-    %% We don't want to crash if there is a field named payload with some other
-    %% type of value
     Payload.
 
 to_iolist(Atom) when is_atom(Atom) -> atom_to_list(Atom);

+ 4 - 7
apps/emqx/src/emqx_trace/emqx_trace_json_formatter.erl

@@ -185,13 +185,10 @@ format_packet(Packet, Encode) -> emqx_packet:format(Packet, Encode).
 
 format_payload(undefined, _) ->
     "";
-format_payload(_, hidden) ->
-    "******";
-format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
-    unicode:characters_to_list(Payload);
-format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> binary:encode_hex(Payload);
-format_payload(<<Part:?TRUNCATED_PAYLOAD_SIZE/binary, _/binary>> = Payload, Type) ->
-    emqx_packet:format_truncated_payload(Part, byte_size(Payload), Type).
+format_payload(Payload, Type) when is_binary(Payload) ->
+    emqx_packet:format_payload(Payload, Type);
+format_payload(Payload, _) ->
+    Payload.
 
 format_map_set_to_list(Map) ->
     Items = [

+ 1 - 1
apps/emqx/src/proto/emqx_proto_v1.erl

@@ -54,7 +54,7 @@ get_stats(Node) ->
 
 -spec get_metrics(node()) -> [{emqx_metrics:metric_name(), non_neg_integer()}] | {badrpc, _}.
 get_metrics(Node) ->
-    rpc:call(Node, emqx_metrics, all, []).
+    rpc:call(Node, emqx_metrics, all, [], timer:seconds(5)).
 
 -spec clean_authz_cache(node(), emqx_types:clientid()) ->
     ok

+ 3 - 1
apps/emqx/test/emqx_bpapi_static_checks.erl

@@ -84,7 +84,9 @@
     % Reason: legacy code. A fun and a QC query are
     % passed in the args, it's futile to try to statically
     % check it
-    "emqx_mgmt_api:do_query/2, emqx_mgmt_api:collect_total_from_tail_nodes/2"
+    "emqx_mgmt_api:do_query/2, emqx_mgmt_api:collect_total_from_tail_nodes/2,"
+    %% Reason: `emqx_machine' should not depend on `emqx', where the `bpapi' modules live.
+    " emqx_machine_replicant_health_probe:get_core_custom_infos/0"
 ).
 
 %% Only the APIs for the features that haven't reached General

+ 21 - 2
apps/emqx/test/emqx_common_test_helpers.erl

@@ -176,11 +176,30 @@
 %%------------------------------------------------------------------------------
 
 all(Suite) ->
-    lists:usort([
+    TestCases = lists:usort([
         F
      || {F, 1} <- Suite:module_info(exports),
         string:substr(atom_to_list(F), 1, 2) == "t_"
-    ]).
+    ]),
+    FlakyTests = flaky_tests(Suite),
+    lists:map(
+        fun(TestCase) ->
+            case maps:find(TestCase, FlakyTests) of
+                {ok, Repetitions} -> {testcase, TestCase, [{flaky, Repetitions}]};
+                error -> TestCase
+            end
+        end,
+        TestCases
+    ).
+
+-spec flaky_tests(module()) -> #{atom() => pos_integer()}.
+flaky_tests(Suite) ->
+    case erlang:function_exported(Suite, flaky_tests, 0) of
+        true ->
+            Suite:flaky_tests();
+        false ->
+            #{}
+    end.
 
 init_per_testcase(Module, TestCase, Config) ->
     case erlang:function_exported(Module, TestCase, 2) of

+ 4 - 5
apps/emqx/test/emqx_crl_cache_SUITE.erl

@@ -465,6 +465,7 @@ t_manual_refresh(Config) ->
     emqx_config_handler:start_link(),
     {ok, _} = emqx_crl_cache:start_link(),
     URL = "http://localhost/crl.pem",
+    URLBin = iolist_to_binary(URL),
     ok = snabbkaffe:start_trace(),
     ?wait_async_action(
         ?assertEqual(ok, emqx_crl_cache:refresh(URL)),
@@ -472,10 +473,7 @@ t_manual_refresh(Config) ->
         5_000
     ),
     ok = snabbkaffe:stop(),
-    ?assertEqual(
-        [{"crl.pem", [CRLDer]}],
-        ets:tab2list(Ref)
-    ),
+    ?assertEqual([{URLBin, [CRLDer]}], ets:tab2list(Ref)),
     emqx_config_handler:stop(),
     ok.
 
@@ -579,13 +577,14 @@ t_evict(_Config) ->
     emqx_config_handler:start_link(),
     {ok, _} = emqx_crl_cache:start_link(),
     URL = "http://localhost/crl.pem",
+    URLBin = iolist_to_binary(URL),
     ?wait_async_action(
         ?assertEqual(ok, emqx_crl_cache:refresh(URL)),
         #{?snk_kind := crl_cache_insert},
         5_000
     ),
     Ref = get_crl_cache_table(),
-    ?assertMatch([{"crl.pem", _}], ets:tab2list(Ref)),
+    ?assertMatch([{URLBin, _}], ets:tab2list(Ref)),
     {ok, {ok, _}} = ?wait_async_action(
         emqx_crl_cache:evict(URL),
         #{?snk_kind := crl_cache_evict}

+ 94 - 0
apps/emqx/test/emqx_packet_tests.erl

@@ -0,0 +1,94 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_packet_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("emqx/include/emqx_mqtt.hrl").
+
+format_payload_test_() ->
+    Hidden = fun(Payload) -> emqx_packet:format_payload(Payload, hidden) end,
+    Hex = fun(Payload) -> bin(emqx_packet:format_payload(Payload, hex)) end,
+    [
+        {"hidden", fun() -> ?assertEqual("******", Hidden(<<>>)) end},
+        {"hex empty", fun() -> ?assertEqual(<<"">>, Hex(<<"">>)) end},
+        {"hex short", fun() -> ?assertEqual(<<"hex:303030">>, Hex(<<"000">>)) end},
+        {"hex at limit", fun() ->
+            Payload = bin(lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE, 0)),
+            Expected = bin(
+                [
+                    "hex:",
+                    binary:encode_hex(bin(lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE, 0)))
+                ]
+            ),
+            ?assertEqual(Expected, Hex(Payload))
+        end},
+        {"hex long", fun() ->
+            Payload = bin(lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE + 2, 0)),
+            Prefix = binary:encode_hex(bin(lists:duplicate(?TRUNCATED_PAYLOAD_SIZE, 0))),
+            Lost = size(Payload) - ?TRUNCATED_PAYLOAD_SIZE,
+            Expected = bin(["hex:", Prefix, "...(", integer_to_list(Lost), " bytes)"]),
+            ?assertEqual(Expected, Hex(Payload))
+        end}
+    ].
+
+format_payload_utf8_test_() ->
+    Fmt = fun(P) -> bin(emqx_packet:format_payload(P, text)) end,
+    [
+        {"empty", fun() -> ?assertEqual(<<"">>, Fmt(<<>>)) end},
+        {"short ascii", fun() -> ?assertEqual(<<"abc">>, Fmt(<<"abc">>)) end},
+        {"short unicode", fun() -> ?assertEqual(<<"日志"/utf8>>, Fmt(<<"日志"/utf8>>)) end},
+        {"unicode at limit", fun() ->
+            Payload = bin(lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE div 2, <<"¢"/utf8>>)),
+            Expected = bin(["", Payload]),
+            ?assertEqual(Expected, Fmt(Payload))
+        end}
+    ].
+
+format_payload_utf8_cutoff_test_() ->
+    Fmt = fun(P) -> bin(emqx_packet:format_payload(P, text)) end,
+    Check = fun(MultiBytesChar) ->
+        Prefix = [lists:duplicate(?TRUNCATED_PAYLOAD_SIZE - 1, $a), MultiBytesChar],
+        Payload = bin([Prefix, MultiBytesChar, lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE, $b)]),
+        Lost = size(Payload) - iolist_size(Prefix),
+        Expected = bin([Prefix, "...(", integer_to_list(Lost), " bytes)"]),
+        ?assertEqual(Expected, Fmt(Payload))
+    end,
+    [
+        {"utf8 1B", fun() -> Check(<<"x"/utf8>>) end},
+        {"utf8 2B", fun() -> Check(<<"¢"/utf8>>) end},
+        {"utf8 3B", fun() -> Check(<<"€"/utf8>>) end},
+        {"utf8 4B", fun() -> Check(<<"𐍈"/utf8>>) end}
+    ].
+
+invalid_utf8_fallback_test() ->
+    %% truncate after the first byte of a utf8-encoded unicode character
+    <<FirstByte:8, Last3Bytes/binary>> = <<"𐍈"/utf8>>,
+    Prefix = iolist_to_binary([lists:duplicate(?TRUNCATED_PAYLOAD_SIZE - 1, $a), FirstByte]),
+    %% invalidate utf8 byte sequence, so it should fallback to hex
+    InvalidUtf8 = 255,
+    Payload = iolist_to_binary([
+        Prefix, InvalidUtf8, lists:duplicate(?MAX_PAYLOAD_FORMAT_SIZE, $b)
+    ]),
+    Lost = size(Payload) - iolist_size(Prefix),
+    Expected = iolist_to_binary([
+        "hex:", binary:encode_hex(Prefix), "...(", integer_to_list(Lost), " bytes)"
+    ]),
+    ?assertEqual(Expected, bin(emqx_packet:format_payload(Payload, text))),
+    ok.
+
+bin(X) ->
+    unicode:characters_to_binary(X).

+ 179 - 0
apps/emqx/test/emqx_quic_multistreams_SUITE.erl

@@ -1980,6 +1980,185 @@ t_listener_with_lowlevel_settings(_Config) ->
     ]),
     ok = emqtt:disconnect(C).
 
+t_keep_alive(Config) ->
+    process_flag(trap_exit, true),
+
+    Topic = atom_to_binary(?FUNCTION_NAME),
+    PubQos = ?config(pub_qos, Config),
+    SubQos = ?config(sub_qos, Config),
+    RecQos = calc_qos(PubQos, SubQos),
+    PktId1 = calc_pkt_id(RecQos, 1),
+    Topic2 = <<Topic/binary, "_two">>,
+    %% GIVEN: keepalive is 2s
+    {ok, C} = emqtt:start_link([{proto_ver, v5}, {force_ping, false}, {keepalive, 2} | Config]),
+    {ok, _} = emqtt:quic_connect(C),
+
+    %% WHEN: we have active data on data stream only
+    %% but keep client ctrl stream quiet with meck
+    meck:new(emqtt, [no_link, passthrough, no_history]),
+    meck:expect(emqtt, connected, fun
+        (info, {timeout, _TRef, keepalive}, State) ->
+            {keep_state, State};
+        (Arg1, Arg2, Arg3) ->
+            meck:passthrough([Arg1, Arg2, Arg3])
+    end),
+    {ok, _, [SubQos]} = emqtt:subscribe_via(C, {new_data_stream, []}, #{}, [
+        {Topic, [{qos, SubQos}]}
+    ]),
+    {ok, _, [SubQos]} = emqtt:subscribe_via(C, {new_data_stream, []}, #{}, [
+        {Topic2, [{qos, SubQos}]}
+    ]),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic,
+        <<"stream data 1">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic2,
+        <<"stream data 2">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+    PubRecvs = recv_pub(2),
+
+    ?assertMatch(
+        [
+            {publish, #{
+                client_pid := C,
+                packet_id := PktId1,
+                payload := <<"stream data", _/binary>>,
+                qos := RecQos
+            }},
+            {publish, #{
+                client_pid := C,
+                packet_id := PktId1,
+                payload := <<"stream data", _/binary>>,
+                qos := RecQos
+            }}
+        ],
+        PubRecvs
+    ),
+    Payloads = [P || {publish, #{payload := P}} <- PubRecvs],
+    ?assert(
+        [<<"stream data 1">>, <<"stream data 2">>] == Payloads orelse
+            [<<"stream data 2">>, <<"stream data 1">>] == Payloads
+    ),
+
+    %% THEN: after 4s idle timeout, client should get disconnected.
+    receive
+        {disconnected, ?RC_KEEP_ALIVE_TIMEOUT, _} ->
+            meck:unload(emqtt),
+            ok
+    after 4000 ->
+        meck:unload(emqtt),
+        ct:fail("Didnt shutdown ~p", [process_info(self(), messages)])
+    end.
+
+t_keep_alive_idle_ctrl_stream(Config) ->
+    process_flag(trap_exit, true),
+
+    Topic = atom_to_binary(?FUNCTION_NAME),
+    PubQos = ?config(pub_qos, Config),
+    SubQos = ?config(sub_qos, Config),
+    RecQos = calc_qos(PubQos, SubQos),
+    PktId1 = calc_pkt_id(RecQos, 1),
+    Topic2 = <<Topic/binary, "_two">>,
+    %% GIVEN: keepalive is 2s
+    {ok, C} = emqtt:start_link([{proto_ver, v5}, {force_ping, false}, {keepalive, 2} | Config]),
+    {ok, _} = emqtt:quic_connect(C),
+
+    %% WHEN: we have active data on data stream only
+    %% but keep ctrl stream quiet with meck
+    meck:new(emqtt, [no_link, passthrough, no_history]),
+    meck:expect(emqtt, connected, fun
+        (info, {timeout, _TRef, keepalive}, State) ->
+            {keep_state, State};
+        (Arg1, Arg2, Arg3) ->
+            meck:passthrough([Arg1, Arg2, Arg3])
+    end),
+    {ok, _, [SubQos]} = emqtt:subscribe_via(C, {new_data_stream, []}, #{}, [
+        {Topic, [{qos, SubQos}]}
+    ]),
+    {ok, _, [SubQos]} = emqtt:subscribe_via(C, {new_data_stream, []}, #{}, [
+        {Topic2, [{qos, SubQos}]}
+    ]),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic,
+        <<"stream data 1">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic2,
+        <<"stream data 2">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+    PubRecvs = recv_pub(2),
+
+    ?assertMatch(
+        [
+            {publish, #{
+                client_pid := C,
+                packet_id := PktId1,
+                payload := <<"stream data", _/binary>>,
+                qos := RecQos
+            }},
+            {publish, #{
+                client_pid := C,
+                packet_id := PktId1,
+                payload := <<"stream data", _/binary>>,
+                qos := RecQos
+            }}
+        ],
+        PubRecvs
+    ),
+    Payloads = [P || {publish, #{payload := P}} <- PubRecvs],
+    ?assert(
+        [<<"stream data 1">>, <<"stream data 2">>] == Payloads orelse
+            [<<"stream data 2">>, <<"stream data 1">>] == Payloads
+    ),
+
+    %% WHEN: keep data stream still active
+    timer:sleep(1000),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic,
+        <<"stream data 1">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+    ok = emqtt:publish_async(
+        C,
+        {new_data_stream, []},
+        Topic2,
+        <<"stream data 2">>,
+        [{qos, PubQos}],
+        undefined
+    ),
+
+    %% THEN: after 4s, client should NOT get disconnected,
+    %%       because data stream is active.
+    receive
+        {disconnected, ?RC_KEEP_ALIVE_TIMEOUT, _} ->
+            meck:unload(emqtt),
+            ct:fail("Should not disconnect")
+        %% 4s - 1s
+    after 3000 ->
+        meck:unload(emqtt),
+        ok
+    end.
+
 %%--------------------------------------------------------------------
 %% Helper functions
 %%--------------------------------------------------------------------

+ 12 - 10
apps/emqx/test/emqx_trace_SUITE.erl

@@ -25,6 +25,7 @@
 -include_lib("emqx/include/emqx_trace.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
 -include_lib("kernel/include/file.hrl").
+-include_lib("emqx/include/emqx_mqtt.hrl").
 
 %%--------------------------------------------------------------------
 %% Setups
@@ -333,11 +334,13 @@ t_client_huge_payload_truncated(_Config) ->
     {ok, _} = emqtt:connect(Client),
     emqtt:ping(Client),
     NormalPayload = iolist_to_binary(lists:duplicate(1024, "x")),
+    Size1 = 1025,
+    TruncatedBytes1 = Size1 - ?TRUNCATED_PAYLOAD_SIZE,
+    HugePayload1 = iolist_to_binary(lists:duplicate(Size1, "y")),
+    Size2 = 1024 * 10,
+    HugePayload2 = iolist_to_binary(lists:duplicate(Size2, "z")),
+    TruncatedBytes2 = Size2 - ?TRUNCATED_PAYLOAD_SIZE,
     ok = emqtt:publish(Client, <<"/test">>, #{}, NormalPayload, [{qos, 0}]),
-    HugePayload1 = iolist_to_binary(lists:duplicate(1025, "y")),
-    ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload1, [{qos, 0}]),
-    HugePayload2 = iolist_to_binary(lists:duplicate(1024 * 10, "y")),
-    ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload2, [{qos, 0}]),
     ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
     {ok, _} = emqx_trace:create([
         {<<"name">>, <<"test_topic">>},
@@ -355,7 +358,6 @@ t_client_huge_payload_truncated(_Config) ->
     ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
     {ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
     {ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
-    ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
     ?assert(erlang:byte_size(Bin) > 1024),
     ?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
     ?assert(erlang:byte_size(Bin3) > 1024),
@@ -365,11 +367,11 @@ t_client_huge_payload_truncated(_Config) ->
     ?assertEqual(nomatch, binary:match(Bin, [CrashBin])),
     ?assertEqual(nomatch, binary:match(Bin2, [CrashBin])),
     ?assertEqual(nomatch, binary:match(Bin3, [CrashBin])),
-    %% have "this log are truncated" for huge payload
-    TruncatedLog = <<"this log are truncated">>,
-    ?assertNotEqual(nomatch, binary:match(Bin, [TruncatedLog])),
-    ?assertNotEqual(nomatch, binary:match(Bin2, [TruncatedLog])),
-    ?assertNotEqual(nomatch, binary:match(Bin3, [TruncatedLog])),
+    Re = <<"\\.\\.\\.\\([0-9]+\\sbytes\\)">>,
+    ?assertMatch(nomatch, re:run(Bin, Re, [unicode])),
+    ReN = fun(N) -> iolist_to_binary(["\\.\\.\\.\\(", integer_to_list(N), "\\sbytes\\)"]) end,
+    ?assertMatch({match, _}, re:run(Bin2, ReN(TruncatedBytes1), [unicode])),
+    ?assertMatch({match, _}, re:run(Bin3, ReN(TruncatedBytes2), [unicode])),
     ok.
 
 t_get_log_filename(_Config) ->

+ 1 - 0
apps/emqx_auth/include/emqx_authn.hrl

@@ -41,6 +41,7 @@
     ?VAR_CERT_SUBJECT,
     ?VAR_CERT_CN_NAME,
     ?VAR_CERT_PEM,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 1 - 1
apps/emqx_auth/src/emqx_auth.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth, [
     {description, "EMQX Authentication and authorization"},
-    {vsn, "0.4.1"},
+    {vsn, "0.4.2"},
     {modules, []},
     {registered, [emqx_auth_sup]},
     {applications, [

+ 9 - 1
apps/emqx_auth/src/emqx_authn/emqx_authn_chains.erl

@@ -151,6 +151,14 @@ end).
     atom() => term()
 }.
 
+-type import_users_result() :: #{
+    total := non_neg_integer(),
+    success := non_neg_integer(),
+    override := non_neg_integer(),
+    skipped := non_neg_integer(),
+    failed := non_neg_integer()
+}.
+
 -export_type([authenticator/0, config/0, state/0, extra/0, user_info/0]).
 
 %%------------------------------------------------------------------------------
@@ -317,7 +325,7 @@ reorder_authenticator(ChainName, AuthenticatorIDs) ->
     authenticator_id(),
     {plain | hash, prepared_user_list | binary(), binary()}
 ) ->
-    ok | {error, term()}.
+    {ok, import_users_result()} | {error, term()}.
 import_users(ChainName, AuthenticatorID, Filename) ->
     call({import_users, ChainName, AuthenticatorID, Filename}).
 

+ 16 - 5
apps/emqx_auth/src/emqx_authn/emqx_authn_user_import_api.erl

@@ -21,6 +21,7 @@
 -include("emqx_authn.hrl").
 -include_lib("emqx/include/logger.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
+-include_lib("typerefl/include/types.hrl").
 
 -import(emqx_dashboard_swagger, [error_codes/2]).
 
@@ -34,7 +35,8 @@
 -export([
     api_spec/0,
     paths/0,
-    schema/1
+    schema/1,
+    import_result_schema/0
 ]).
 
 -export([
@@ -61,7 +63,7 @@ schema("/authentication/:id/import_users") ->
             parameters => [emqx_authn_api:param_auth_id(), param_password_type()],
             'requestBody' => request_body_schema(),
             responses => #{
-                204 => <<"Users imported">>,
+                200 => import_result_schema(),
                 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
                 404 => error_codes([?NOT_FOUND], <<"Not Found">>)
             }
@@ -81,7 +83,7 @@ schema("/listeners/:listener_id/authentication/:id/import_users") ->
             ],
             'requestBody' => request_body_schema(),
             responses => #{
-                204 => <<"Users imported">>,
+                200 => import_result_schema(),
                 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
                 404 => error_codes([?NOT_FOUND], <<"Not Found">>)
             }
@@ -115,6 +117,15 @@ request_body_schema() ->
         description => <<"Import body">>
     }.
 
+import_result_schema() ->
+    [
+        {total, hoconsc:mk(integer(), #{description => ?DESC(import_result_total)})},
+        {success, hoconsc:mk(integer(), #{description => ?DESC(import_result_success)})},
+        {override, hoconsc:mk(integer(), #{description => ?DESC(import_result_override)})},
+        {skipped, hoconsc:mk(integer(), #{description => ?DESC(import_result_skipped)})},
+        {failed, hoconsc:mk(integer(), #{description => ?DESC(import_result_failed)})}
+    ].
+
 authenticator_import_users(
     post,
     Req = #{
@@ -142,7 +153,7 @@ authenticator_import_users(
                 end
         end,
     case Result of
-        ok -> {204};
+        {ok, Result1} -> {200, Result1};
         {error, Reason} -> emqx_authn_api:serialize_error(Reason)
     end.
 
@@ -165,7 +176,7 @@ listener_authenticator_import_users(
                         ChainName, AuthenticatorID, {PasswordType, FileName, FileData}
                     )
                 of
-                    ok -> {204};
+                    {ok, Result} -> {200, Result};
                     {error, Reason} -> emqx_authn_api:serialize_error(Reason)
                 end
             end

+ 1 - 0
apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl

@@ -126,6 +126,7 @@
     ?VAR_USERNAME,
     ?VAR_CLIENTID,
     ?VAR_CERT_CN_NAME,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 28 - 0
apps/emqx_auth/test/emqx_authz/emqx_authz_file_SUITE.erl

@@ -121,6 +121,34 @@ t_cert_common_name(_Config) ->
     ),
     ok.
 
+t_zone(_Config) ->
+    ClientInfo0 = emqx_authz_test_lib:base_client_info(),
+    ClientInfo = ClientInfo0#{zone => <<"zone1">>},
+    ok = setup_config(?RAW_SOURCE#{
+        <<"rules">> => <<"{allow, all, all, [\"t/${zone}/#\"]}.">>
+    }),
+
+    ?assertEqual(
+        allow,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t/zone1/1">>)
+    ),
+
+    ?assertEqual(
+        allow,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_SUBSCRIBE, <<"t/zone1/#">>)
+    ),
+
+    ?assertEqual(
+        deny,
+        emqx_access_control:authorize(ClientInfo#{zone => other}, ?AUTHZ_SUBSCRIBE, <<"t/zone1/1">>)
+    ),
+
+    ?assertEqual(
+        deny,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_SUBSCRIBE, <<"t/otherzone/1">>)
+    ),
+    ok.
+
 t_rich_actions(_Config) ->
     ClientInfo = emqx_authz_test_lib:base_client_info(),
 

+ 1 - 1
apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_jwt, [
     {description, "EMQX JWT Authentication and Authorization"},
-    {vsn, "0.3.3"},
+    {vsn, "0.3.4"},
     {registered, []},
     {mod, {emqx_auth_jwt_app, []}},
     {applications, [

+ 1 - 1
apps/emqx_auth_jwt/src/emqx_authn_jwt.erl

@@ -320,7 +320,7 @@ do_verify(JWT, [JWK | More], VerifyClaims) ->
             do_verify(JWT, More, VerifyClaims)
     catch
         _:Reason ->
-            ?TRACE_AUTHN_PROVIDER("jwt_verify_error", #{jwk => JWK, jwt => JWT, reason => Reason}),
+            ?TRACE_AUTHN_PROVIDER("jwt_verify_error", #{jwt => JWT, reason => Reason}),
             do_verify(JWT, More, VerifyClaims)
     end.
 

+ 1 - 1
apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mnesia, [
     {description, "EMQX Buitl-in Database Authentication and Authorization"},
-    {vsn, "0.2.0"},
+    {vsn, "0.2.1"},
     {registered, []},
     {mod, {emqx_auth_mnesia_app, []}},
     {applications, [

+ 39 - 44
apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl

@@ -179,23 +179,12 @@ import_users({PasswordType, Filename, FileData}, State, Opts) ->
     try parse_import_users(Filename, FileData, Convertor) of
         {_NewUsersCnt, Users} ->
             case do_import_users(Users, Opts#{filename => Filename}) of
-                ok ->
-                    ok;
+                {ok, Result} ->
+                    {ok, Result};
                 %% Do not log empty user entries.
                 %% The default etc/auth-built-in-db.csv file contains an empty user entry.
                 {error, empty_users} ->
-                    {error, empty_users};
-                {error, Reason} ->
-                    ?SLOG(
-                        warning,
-                        #{
-                            msg => "import_authn_users_failed",
-                            reason => Reason,
-                            type => PasswordType,
-                            filename => Filename
-                        }
-                    ),
-                    {error, Reason}
+                    {error, empty_users}
             end
     catch
         error:Reason:Stk ->
@@ -215,16 +204,19 @@ import_users({PasswordType, Filename, FileData}, State, Opts) ->
 do_import_users([], _Opts) ->
     {error, empty_users};
 do_import_users(Users, Opts) ->
-    trans(
-        fun() ->
-            lists:foreach(
-                fun(User) ->
-                    insert_user(User, Opts)
-                end,
-                Users
-            )
-        end
-    ).
+    Fun = fun() ->
+        lists:foldl(
+            fun(User, Acc) ->
+                Return = insert_user(User, Opts),
+                N = maps:get(Return, Acc, 0),
+                maps:put(Return, N + 1, Acc)
+            end,
+            #{success => 0, skipped => 0, override => 0, failed => 0},
+            Users
+        )
+    end,
+    Res = trans(Fun),
+    {ok, Res#{total => length(Users)}}.
 
 add_user(
     UserInfo,
@@ -241,7 +233,7 @@ do_add_user(
 ) ->
     case mnesia:read(?TAB, DBUserID, write) of
         [] ->
-            insert_user(UserInfoRecord),
+            ok = insert_user(UserInfoRecord),
             {ok, #{user_id => UserID, is_superuser => IsSuperuser}};
         [_] ->
             {error, already_exist}
@@ -281,7 +273,7 @@ do_update_user(
             {error, not_found};
         [#user_info{} = UserInfoRecord] ->
             NUserInfoRecord = update_user_record(UserInfoRecord, FieldsToUpdate),
-            insert_user(NUserInfoRecord),
+            ok = insert_user(NUserInfoRecord),
             {ok, #{user_id => UserID, is_superuser => NUserInfoRecord#user_info.is_superuser}}
     end.
 
@@ -332,6 +324,7 @@ run_fuzzy_filter(
 %% Internal functions
 %%------------------------------------------------------------------------------
 
+-spec insert_user(map(), map()) -> success | skipped | override | failed.
 insert_user(User, Opts) ->
     #{
         <<"user_group">> := UserGroup,
@@ -345,26 +338,28 @@ insert_user(User, Opts) ->
         user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
     case mnesia:read(?TAB, DBUserID, write) of
         [] ->
-            insert_user(UserInfoRecord);
+            ok = insert_user(UserInfoRecord),
+            success;
         [UserInfoRecord] ->
-            ok;
+            skipped;
         [_] ->
-            Msg =
-                case maps:get(override, Opts, false) of
-                    true ->
-                        insert_user(UserInfoRecord),
-                        "override_an_exists_userid_into_authentication_database_ok";
-                    false ->
-                        "import_an_exists_userid_into_authentication_database_failed"
-                end,
-            ?SLOG(warning, #{
-                msg => Msg,
-                user_id => UserID,
-                group_id => UserGroup,
-                bootstrap_file => maps:get(filename, Opts),
-                suggestion =>
-                    "If you've altered it differently, delete the user_id from the bootstrap file."
-            })
+            LogF = fun(Msg) ->
+                ?SLOG(warning, #{
+                    msg => Msg,
+                    user_id => UserID,
+                    group_id => UserGroup,
+                    bootstrap_file => maps:get(filename, Opts)
+                })
+            end,
+            case maps:get(override, Opts, false) of
+                true ->
+                    ok = insert_user(UserInfoRecord),
+                    LogF("override_an_exists_userid_into_authentication_database_ok"),
+                    override;
+                false ->
+                    LogF("import_an_exists_userid_into_authentication_database_failed"),
+                    failed
+            end
     end.
 
 insert_user(#user_info{} = UserInfoRecord) ->

+ 11 - 5
apps/emqx_auth_mnesia/test/emqx_authn_api_mnesia_SUITE.erl

@@ -330,22 +330,28 @@ test_authenticator_import_users(PathPrefix) ->
     CSVFileName = filename:join([Dir, <<"data/user-credentials.csv">>]),
 
     {ok, JSONData} = file:read_file(JSONFileName),
-    {ok, 204, _} = multipart_formdata_request(ImportUri, [], [
+    {ok, 200, Result1} = multipart_formdata_request(ImportUri, [], [
         {filename, "user-credentials.json", JSONData}
     ]),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2}, emqx_utils_json:decode(Result1, [return_maps])
+    ),
 
     {ok, CSVData} = file:read_file(CSVFileName),
-    {ok, 204, _} = multipart_formdata_request(ImportUri, [], [
+    {ok, 200, Result2} = multipart_formdata_request(ImportUri, [], [
         {filename, "user-credentials.csv", CSVData}
     ]),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2}, emqx_utils_json:decode(Result2, [return_maps])
+    ),
 
     %% test application/json
-    {ok, 204, _} = request(post, ImportUri ++ "?type=hash", emqx_utils_json:decode(JSONData)),
+    {ok, 200, _} = request(post, ImportUri ++ "?type=hash", emqx_utils_json:decode(JSONData)),
     {ok, JSONData1} = file:read_file(filename:join([Dir, <<"data/user-credentials-plain.json">>])),
-    {ok, 204, _} = request(post, ImportUri ++ "?type=plain", emqx_utils_json:decode(JSONData1)),
+    {ok, 200, _} = request(post, ImportUri ++ "?type=plain", emqx_utils_json:decode(JSONData1)),
 
     %% test application/json; charset=utf-8
-    {ok, 204, _} = request_with_charset(post, ImportUri ++ "?type=plain", JSONData1),
+    {ok, 200, _} = request_with_charset(post, ImportUri ++ "?type=plain", JSONData1),
     ok.
 
 %%------------------------------------------------------------------------------

+ 18 - 18
apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl

@@ -279,16 +279,16 @@ t_import_users(_) ->
     Config = Config0#{password_hash_algorithm => #{name => sha256}},
     {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(<<"user-credentials.json">>),
             State
         )
     ),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(<<"user-credentials.csv">>),
             State
@@ -372,8 +372,8 @@ t_import_users_plain(_) ->
     Config = Config0#{password_hash_algorithm => #{name => sha256, salt_position => suffix}},
     {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(plain, <<"user-credentials-plain.json">>),
             State
@@ -388,8 +388,8 @@ t_import_users_plain(_) ->
         )
     ),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(plain, <<"user-credentials-plain.csv">>),
             State
@@ -422,16 +422,16 @@ t_import_users_prepared_list(_) ->
         }
     ],
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             {plain, prepared_user_list, Users1},
             State
         )
     ),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             {hash, prepared_user_list, Users2},
             State
@@ -443,15 +443,15 @@ t_import_users_duplicated_records(_) ->
     Config = Config0#{password_hash_algorithm => #{name => plain, salt_position => disable}},
     {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config),
 
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(plain, <<"user-credentials-plain-dup.json">>),
             State
         )
     ),
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             sample_filename_and_data(plain, <<"user-credentials-plain-dup.csv">>),
             State
@@ -469,8 +469,8 @@ t_import_users_duplicated_records(_) ->
             <<"is_superuser">> => false
         }
     ],
-    ?assertEqual(
-        ok,
+    ?assertMatch(
+        {ok, _},
         emqx_authn_mnesia:import_users(
             {plain, prepared_user_list, Users1},
             State

+ 1 - 1
apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mongodb, [
     {description, "EMQX MongoDB Authentication and Authorization"},
-    {vsn, "0.2.2"},
+    {vsn, "0.3.0"},
     {registered, []},
     {mod, {emqx_auth_mongodb_app, []}},
     {applications, [

+ 1 - 0
apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl

@@ -41,6 +41,7 @@
     ?VAR_PEERHOST,
     ?VAR_CERT_CN_NAME,
     ?VAR_CERT_SUBJECT,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 1 - 1
apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mysql, [
     {description, "EMQX MySQL Authentication and Authorization"},
-    {vsn, "0.2.2"},
+    {vsn, "0.3.0"},
     {registered, []},
     {mod, {emqx_auth_mysql_app, []}},
     {applications, [

+ 1 - 0
apps/emqx_auth_mysql/src/emqx_authz_mysql.erl

@@ -43,6 +43,7 @@
     ?VAR_PEERHOST,
     ?VAR_CERT_CN_NAME,
     ?VAR_CERT_SUBJECT,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 1 - 1
apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_postgresql, [
     {description, "EMQX PostgreSQL Authentication and Authorization"},
-    {vsn, "0.2.2"},
+    {vsn, "0.3.0"},
     {registered, []},
     {mod, {emqx_auth_postgresql_app, []}},
     {applications, [

+ 1 - 0
apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl

@@ -43,6 +43,7 @@
     ?VAR_PEERHOST,
     ?VAR_CERT_CN_NAME,
     ?VAR_CERT_SUBJECT,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 1 - 1
apps/emqx_auth_redis/src/emqx_auth_redis.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_redis, [
     {description, "EMQX Redis Authentication and Authorization"},
-    {vsn, "0.2.2"},
+    {vsn, "0.3.0"},
     {registered, []},
     {mod, {emqx_auth_redis_app, []}},
     {applications, [

+ 1 - 0
apps/emqx_auth_redis/src/emqx_authz_redis.erl

@@ -41,6 +41,7 @@
     ?VAR_PEERHOST,
     ?VAR_CLIENTID,
     ?VAR_USERNAME,
+    ?VAR_ZONE,
     ?VAR_NS_CLIENT_ATTRS
 ]).
 

+ 1 - 1
apps/emqx_bridge/src/emqx_bridge.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge, [
     {description, "EMQX bridges"},
-    {vsn, "0.2.5"},
+    {vsn, "0.2.6"},
     {registered, [emqx_bridge_sup]},
     {mod, {emqx_bridge_app, []}},
     {applications, [

+ 8 - 4
apps/emqx_bridge/src/emqx_bridge_v2.erl

@@ -1179,10 +1179,14 @@ post_config_update([ConfRootKey, BridgeType, BridgeName], _Req, NewConf, OldConf
         ok ->
             ok;
         {error, timeout} ->
-            throw(<<
-                "Timed out trying to remove action or source.  Please try again and,"
-                " if the error persists, try disabling the connector before retrying."
-            >>);
+            ErrorContext = #{
+                error => uninstall_timeout,
+                reason => <<
+                    "Timed out trying to remove action or source.  Please try again and,"
+                    " if the error persists, try disabling the connector before retrying."
+                >>
+            },
+            throw(ErrorContext);
         {error, not_found} ->
             %% Should not happen, unless config is inconsistent.
             throw(<<"Referenced connector not found">>)

+ 8 - 2
apps/emqx_bridge/src/emqx_bridge_v2_api.erl

@@ -292,7 +292,8 @@ schema("/actions/:id") ->
             responses => #{
                 200 => actions_get_response_body_schema(),
                 404 => error_schema('NOT_FOUND', "Bridge not found"),
-                400 => error_schema('BAD_REQUEST', "Update bridge failed")
+                400 => error_schema('BAD_REQUEST', "Update bridge failed"),
+                503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
             }
         },
         delete => #{
@@ -503,7 +504,8 @@ schema("/sources/:id") ->
             responses => #{
                 200 => sources_get_response_body_schema(),
                 404 => error_schema('NOT_FOUND', "Source not found"),
-                400 => error_schema('BAD_REQUEST', "Update source failed")
+                400 => error_schema('BAD_REQUEST', "Update source failed"),
+                503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
             }
         },
         delete => #{
@@ -1446,6 +1448,10 @@ do_create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, HttpStatus
             PreOrPostConfigUpdate =:= post_config_update
         ->
             ?BAD_REQUEST(emqx_utils_api:to_json(redact(Reason)));
+        {error, Reason} when is_binary(Reason) ->
+            ?BAD_REQUEST(Reason);
+        {error, #{error := uninstall_timeout} = Reason} ->
+            ?SERVICE_UNAVAILABLE(emqx_utils_api:to_json(redact(Reason)));
         {error, Reason} when is_map(Reason) ->
             ?BAD_REQUEST(emqx_utils_api:to_json(redact(Reason)))
     end.

+ 98 - 0
apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl

@@ -636,6 +636,24 @@ create_action_api(Name, Type, Params) ->
     ]),
     emqx_mgmt_api_test_util:simplify_result(Res).
 
+update_action_api(Name, Type, Params) ->
+    Res = emqx_bridge_v2_testlib:update_bridge_api([
+        {bridge_kind, action},
+        {action_type, Type},
+        {action_name, Name},
+        {action_config, Params}
+    ]),
+    emqx_mgmt_api_test_util:simplify_result(Res).
+
+update_source_api(Name, Type, Params) ->
+    Res = emqx_bridge_v2_testlib:update_bridge_api([
+        {bridge_kind, source},
+        {source_type, Type},
+        {source_name, Name},
+        {source_config, Params}
+    ]),
+    emqx_mgmt_api_test_util:simplify_result(Res).
+
 list_sources_api() ->
     Res = emqx_bridge_v2_testlib:list_sources_http_api(),
     emqx_mgmt_api_test_util:simplify_result(Res).
@@ -1908,3 +1926,83 @@ t_kind_dependencies(Config) when is_list(Config) ->
         []
     ),
     ok.
+
+%% Verifies that we return thrown messages as is to the API.
+t_thrown_messages(matrix) ->
+    [
+        [single, actions],
+        [single, sources]
+    ];
+t_thrown_messages(Config) when is_list(Config) ->
+    meck:expect(?CONNECTOR_IMPL, on_remove_channel, fun(_ConnResId, ConnState, _ActionResid) ->
+        timer:sleep(20_000),
+        {ok, ConnState}
+    end),
+    ?check_trace(
+        begin
+            [_SingleOrCluster, Kind | _] = group_path(Config),
+            ConnectorType = ?SOURCE_CONNECTOR_TYPE,
+            ConnectorName = <<"c">>,
+            {ok, {{_, 201, _}, _, _}} =
+                emqx_bridge_v2_testlib:create_connector_api([
+                    {connector_config, source_connector_create_config(#{})},
+                    {connector_name, ConnectorName},
+                    {connector_type, ConnectorType}
+                ]),
+            do_t_thrown_messages(Kind, Config, ConnectorName),
+            meck:expect(?CONNECTOR_IMPL, on_remove_channel, 3, {ok, connector_state}),
+            ok
+        end,
+        []
+    ),
+    ok.
+
+do_t_thrown_messages(actions, _Config, ConnectorName) ->
+    Name = <<"a1">>,
+    %% MQTT
+    Type = ?SOURCE_TYPE,
+    CreateConfig = mqtt_action_create_config(#{
+        <<"connector">> => ConnectorName
+    }),
+    {201, _} = create_action_api(
+        Name,
+        Type,
+        CreateConfig
+    ),
+    UpdateConfig = maps:remove(<<"type">>, CreateConfig),
+    ?assertMatch(
+        {503, #{
+            <<"message">> :=
+                #{<<"reason">> := <<"Timed out trying to remove", _/binary>>}
+        }},
+        update_action_api(
+            Name,
+            Type,
+            UpdateConfig
+        )
+    ),
+    ok;
+do_t_thrown_messages(sources, _Config, ConnectorName) ->
+    Name = <<"s1">>,
+    Type = ?SOURCE_TYPE,
+    CreateConfig = source_create_config(#{
+        <<"connector">> => ConnectorName
+    }),
+    {201, _} = create_source_api(
+        Name,
+        Type,
+        CreateConfig
+    ),
+    UpdateConfig = maps:remove(<<"type">>, CreateConfig),
+    ?assertMatch(
+        {503, #{
+            <<"message">> :=
+                #{<<"reason">> := <<"Timed out trying to remove", _/binary>>}
+        }},
+        update_source_api(
+            Name,
+            Type,
+            UpdateConfig
+        )
+    ),
+    ok.

+ 1 - 1
apps/emqx_bridge_iotdb/rebar.config

@@ -10,7 +10,7 @@
     {emqx_resource, {path, "../../apps/emqx_resource"}},
     {emqx_bridge, {path, "../../apps/emqx_bridge"}},
     {emqx_bridge_http, {path, "../emqx_bridge_http"}},
-    {iotdb, {git, "https://github.com/emqx/iotdb-client-erl.git", {tag, "0.1.5"}}}
+    {iotdb, {git, "https://github.com/emqx/iotdb-client-erl.git", {tag, "0.1.7"}}}
 ]}.
 {plugins, [rebar3_path_deps]}.
 {project_plugins, [erlfmt]}.

+ 12 - 1
apps/emqx_bridge_snowflake/test/emqx_bridge_snowflake_SUITE.erl

@@ -44,6 +44,11 @@
     | T
 ]).
 
+-define(tpal(MSG), begin
+    ct:pal(MSG),
+    ?tp(notice, MSG, #{})
+end).
+
 %%------------------------------------------------------------------------------
 %% CT boilerplate
 %%------------------------------------------------------------------------------
@@ -568,8 +573,10 @@ t_aggreg_upload(Config) ->
                 {<<"C4">>, <<"t/42">>, <<"won't appear in results">>}
             ]),
             ok = publish_messages(Messages1),
+            ?tpal("published first batch"),
             %% Wait until the delivery is completed.
             ?block_until(#{?snk_kind := connector_aggreg_delivery_completed, action := AggregId}),
+            ?tpal("first batch delivered"),
             %% Send a second batch of messages to be staged in a second file
             Messages2 = lists:map(fun mk_message/1, [
                 {<<"C4">>, T4 = <<"sf/a/b/c">>, P4 = <<"{\"hello\":\"world\"}">>},
@@ -578,9 +585,13 @@ t_aggreg_upload(Config) ->
             ]),
             {ok, {ok, _}} =
                 ?wait_async_action(
-                    publish_messages(Messages2),
+                    begin
+                        publish_messages(Messages2),
+                        ?tpal("published second batch")
+                    end,
                     #{?snk_kind := connector_aggreg_delivery_completed, action := AggregId}
                 ),
+            ?tpal("second batch delivered"),
             %% Check the uploaded objects.
             ExpectedNumFiles = 2,
             wait_until_processed(Config, ActionResId, BeginMark, ExpectedNumFiles),

+ 1 - 1
apps/emqx_cluster_link/src/emqx_cluster_link.app.src

@@ -2,7 +2,7 @@
 {application, emqx_cluster_link, [
     {description, "EMQX Cluster Linking"},
     % strict semver, bump manually!
-    {vsn, "0.1.1"},
+    {vsn, "0.1.2"},
     {modules, []},
     {registered, []},
     {applications, [

+ 63 - 17
apps/emqx_cluster_link/src/emqx_cluster_link.erl

@@ -28,6 +28,9 @@
     on_message_publish/1
 ]).
 
+%% Internal exports
+-export([do_handle_route_op_msg/1]).
+
 -include("emqx_cluster_link.hrl").
 -include_lib("emqx/include/emqx.hrl").
 -include_lib("emqx/include/emqx_hooks.hrl").
@@ -109,6 +112,44 @@ forward(Routes, Delivery) ->
 %%--------------------------------------------------------------------
 
 on_message_publish(
+    #message{topic = <<?ROUTE_TOPIC_PREFIX, _/binary>>} = Msg
+) ->
+    case handle_route_op_msg(Msg) of
+        ok ->
+            {stop, []};
+        error ->
+            %% Disconnect so that upstream agent starts anew
+            Headers0 = Msg#message.headers,
+            Headers = Headers0#{
+                allow_publish => false,
+                should_disconnect => true
+            },
+            StopMsg = emqx_message:set_headers(Headers, Msg),
+            {stop, StopMsg}
+    end;
+on_message_publish(#message{topic = <<?MSG_TOPIC_PREFIX, ClusterName/binary>>, payload = Payload}) ->
+    case emqx_cluster_link_mqtt:decode_forwarded_msg(Payload) of
+        #message{} = ForwardedMsg ->
+            {stop, maybe_filter_incoming_msg(ForwardedMsg, ClusterName)};
+        _Err ->
+            %% Just ignore it. It must be already logged by the decoder
+            {stop, []}
+    end;
+on_message_publish(_Msg) ->
+    ok.
+
+put_hook() ->
+    emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_SYS_MSGS).
+
+delete_hook() ->
+    emqx_hooks:del('message.publish', {?MODULE, on_message_publish, []}).
+
+%%--------------------------------------------------------------------
+%% Internal exports
+%%--------------------------------------------------------------------
+
+%% Exported only for mocking in tests
+do_handle_route_op_msg(
     #message{topic = <<?ROUTE_TOPIC_PREFIX, ClusterName/binary>>, payload = Payload} = Msg
 ) ->
     case emqx_cluster_link_mqtt:decode_route_op(Payload) of
@@ -126,24 +167,8 @@ on_message_publish(
                 payload => ParsedPayload
             })
     end,
-    {stop, []};
-on_message_publish(#message{topic = <<?MSG_TOPIC_PREFIX, ClusterName/binary>>, payload = Payload}) ->
-    case emqx_cluster_link_mqtt:decode_forwarded_msg(Payload) of
-        #message{} = ForwardedMsg ->
-            {stop, maybe_filter_incomming_msg(ForwardedMsg, ClusterName)};
-        _Err ->
-            %% Just ignore it. It must be already logged by the decoder
-            {stop, []}
-    end;
-on_message_publish(_Msg) ->
     ok.
 
-put_hook() ->
-    emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_SYS_MSGS).
-
-delete_hook() ->
-    emqx_hooks:del('message.publish', {?MODULE, on_message_publish, []}).
-
 %%--------------------------------------------------------------------
 %% Internal functions
 %%--------------------------------------------------------------------
@@ -151,6 +176,27 @@ delete_hook() ->
 -define(PD_EXTROUTER_ACTOR, '$clink_extrouter_actor').
 -define(PD_EXTROUTER_ACTOR_STATE, '$clink_extrouter_actor_state').
 
+handle_route_op_msg(
+    #message{topic = <<?ROUTE_TOPIC_PREFIX, ClusterName/binary>>} = Msg
+) ->
+    try
+        ?MODULE:do_handle_route_op_msg(Msg)
+    catch
+        K:E:Stacktrace ->
+            MyClusterName = emqx_cluster_link_config:cluster(),
+            ?SLOG(error, #{
+                msg => "cluster_link_routesync_protocol_error",
+                kind => K,
+                reason => E,
+                stacktrace => Stacktrace,
+                %% How this cluster names itself
+                local_name => MyClusterName,
+                %% How the remote cluster names itself
+                received_from => ClusterName
+            }),
+            error
+    end.
+
 maybe_push_route_op(Op, Topic, RouteID) ->
     maybe_push_route_op(Op, Topic, RouteID, push).
 
@@ -264,7 +310,7 @@ update_actor_state(ActorSt) ->
 with_sender_name(#message{extra = Extra} = Msg, ClusterName) when is_map(Extra) ->
     Msg#message{extra = Extra#{link_origin => ClusterName}}.
 
-maybe_filter_incomming_msg(#message{topic = T} = Msg, ClusterName) ->
+maybe_filter_incoming_msg(#message{topic = T} = Msg, ClusterName) ->
     %% Should prevent irrelevant messages from being dispatched in case
     %% the remote routing state lags behind the local config changes.
     #{enable := Enable, topics := Topics} = emqx_cluster_link_config:link(ClusterName),

+ 2 - 4
apps/emqx_cluster_link/src/emqx_cluster_link_app.erl

@@ -8,16 +8,14 @@
 
 -export([start/2, prep_stop/1, stop/1]).
 
--define(BROKER_MOD, emqx_cluster_link).
-
 start(_StartType, _StartArgs) ->
     ok = mria:wait_for_tables(emqx_cluster_link_extrouter:create_tables()),
     emqx_cluster_link_config:add_handler(),
     LinksConf = emqx_cluster_link_config:enabled_links(),
+    ok = emqx_cluster_link:register_external_broker(),
+    ok = emqx_cluster_link:put_hook(),
     case LinksConf of
         [_ | _] ->
-            ok = emqx_cluster_link:register_external_broker(),
-            ok = emqx_cluster_link:put_hook(),
             ok = start_msg_fwd_resources(LinksConf);
         _ ->
             ok

+ 0 - 13
apps/emqx_cluster_link/src/emqx_cluster_link_config.erl

@@ -231,7 +231,6 @@ pre_config_update(?LINKS_PATH, NewRawConf, OldRawConf) ->
 post_config_update(?LINKS_PATH, _Req, Old, Old, _AppEnvs) ->
     ok;
 post_config_update(?LINKS_PATH, _Req, New, Old, _AppEnvs) ->
-    ok = toggle_hook_and_broker(enabled_links(New), enabled_links(Old)),
     #{
         removed := Removed,
         added := Added,
@@ -252,18 +251,6 @@ post_config_update(?LINKS_PATH, _Req, New, Old, _AppEnvs) ->
 %% Internal functions
 %%--------------------------------------------------------------------
 
-toggle_hook_and_broker([_ | _] = _NewEnabledLinks, [] = _OldEnabledLinks) ->
-    ok = emqx_cluster_link:register_external_broker(),
-    ok = emqx_cluster_link:put_hook();
-toggle_hook_and_broker([] = _NewEnabledLinks, _OldLinks) ->
-    _ = emqx_cluster_link:unregister_external_broker(),
-    ok = emqx_cluster_link:delete_hook();
-toggle_hook_and_broker(_, _) ->
-    ok.
-
-enabled_links(LinksConf) ->
-    [L || #{enable := true} = L <- LinksConf].
-
 all_ok(Results) ->
     lists:all(
         fun

+ 11 - 1
apps/emqx_cluster_link/src/emqx_cluster_link_extrouter_gc.erl

@@ -5,10 +5,11 @@
 -module(emqx_cluster_link_extrouter_gc).
 
 -include_lib("emqx/include/logger.hrl").
+-include_lib("snabbkaffe/include/trace.hrl").
 
 -export([start_link/0]).
 
--export([run/0]).
+-export([run/0, force/1]).
 
 -behaviour(gen_server).
 -export([
@@ -34,6 +35,14 @@ start_link() ->
 run() ->
     gen_server:call(?SERVER, run).
 
+force(Timestamp) ->
+    case emqx_cluster_link_extrouter:actor_gc(#{timestamp => Timestamp}) of
+        1 ->
+            force(Timestamp);
+        0 ->
+            ok
+    end.
+
 %%
 
 -record(st, {
@@ -56,6 +65,7 @@ handle_cast(Cast, State) ->
 
 handle_info({timeout, TRef, _GC}, St = #st{gc_timer = TRef}) ->
     Result = run_gc_exclusive(),
+    ?tp("clink_extrouter_gc_ran", #{result => Result}),
     Timeout = choose_timeout(Result),
     {noreply, schedule_gc(Timeout, St#st{gc_timer = undefined})};
 handle_info(Info, St) ->

+ 8 - 2
apps/emqx_cluster_link/src/emqx_cluster_link_mqtt.erl

@@ -93,6 +93,8 @@
 
 -define(PUB_TIMEOUT, 10_000).
 
+-define(AUTO_RECONNECT_INTERVAL_S, 2).
+
 -type cluster_name() :: binary().
 
 -spec resource_id(cluster_name()) -> resource_id().
@@ -173,6 +175,7 @@ on_start(ResourceId, #{pool_size := PoolSize} = ClusterConf) ->
         {name, PoolName},
         {pool_size, PoolSize},
         {pool_type, hash},
+        {auto_reconnect, ?AUTO_RECONNECT_INTERVAL_S},
         {client_opts, emqtt_client_opts(?MSG_CLIENTID_SUFFIX, ClusterConf)}
     ],
     ok = emqx_resource:allocate_resource(ResourceId, pool_name, PoolName),
@@ -211,7 +214,7 @@ on_query_async(
     Callback = {fun on_async_result/2, [CallbackIn]},
     #message{topic = Topic, qos = QoS} = FwdMsg,
     %% TODO check message ordering, pick by topic,client pair?
-    ecpool:pick_and_do(
+    Result = ecpool:pick_and_do(
         {PoolName, Topic},
         fun(ConnPid) ->
             %% #delivery{} record has no valuable data for a remote link...
@@ -226,7 +229,10 @@ on_query_async(
             PubResult
         end,
         no_handover
-    ).
+    ),
+    %% This result could be `{error, ecpool_empty}', for example, which should be
+    %% recoverable.  If we didn't handle it here, it would be considered unrecoverable.
+    handle_send_result(Result).
 
 %% copied from emqx_bridge_mqtt_connector
 

+ 11 - 6
apps/emqx_cluster_link/src/emqx_cluster_link_router_syncer.erl

@@ -397,8 +397,8 @@ handle_info(
                 clink_handshake_error,
                 #{actor => {St1#st.actor, St1#st.incarnation}, reason => Reason}
             ),
-            %% TODO: retry after a timeout?
-            {noreply, St1#st{error = Reason, status = disconnected}}
+            St2 = ensure_reconnect_timer(St1#st{error = Reason, status = disconnected}),
+            {noreply, St2}
     end;
 handle_info({timeout, TRef, reconnect}, St = #st{reconnect_timer = TRef}) ->
     {noreply, process_connect(St#st{reconnect_timer = undefined})};
@@ -472,6 +472,13 @@ post_actor_init(
     NSt = schedule_heartbeat(St#st{client = ClientPid}),
     process_bootstrap(NSt, NeedBootstrap).
 
+ensure_reconnect_timer(#st{reconnect_timer = undefined} = St) ->
+    TRef = erlang:start_timer(?RECONNECT_TIMEOUT, self(), reconnect),
+    St#st{reconnect_timer = TRef};
+ensure_reconnect_timer(#st{reconnect_timer = TRef} = St) ->
+    _ = erlang:cancel_timer(TRef),
+    ensure_reconnect_timer(St#st{reconnect_timer = undefined}).
+
 handle_connect_error(Reason, St) ->
     ?SLOG(error, #{
         msg => "cluster_link_connection_failed",
@@ -479,16 +486,14 @@ handle_connect_error(Reason, St) ->
         target_cluster => St#st.target,
         actor => St#st.actor
     }),
-    TRef = erlang:start_timer(?RECONNECT_TIMEOUT, self(), reconnect),
     _ = maybe_alarm(Reason, St),
-    St#st{reconnect_timer = TRef, error = Reason, status = disconnected}.
+    ensure_reconnect_timer(St#st{error = Reason, status = disconnected}).
 
 handle_client_down(
     Reason,
     St = #st{target = TargetCluster, actor = Actor, bootstrapped = Bootstrapped}
 ) ->
-    ?SLOG(error, #{
-        msg => "cluster_link_connection_failed",
+    ?tp(error, "cluster_link_connection_failed", #{
         reason => Reason,
         target_cluster => St#st.target,
         actor => St#st.actor

+ 63 - 6
apps/emqx_cluster_link/test/emqx_cluster_link_SUITE.erl

@@ -12,6 +12,8 @@
 -compile(export_all).
 -compile(nowarn_export_all).
 
+-define(ON(NODE, DO), erpc:call(NODE, fun() -> DO end)).
+
 %%
 
 all() ->
@@ -83,8 +85,8 @@ mk_target_cluster(BaseName, Config) ->
         "\n     topics = [\"#\"]"
         "\n   }"
         "\n ]}",
-    TargetApps1 = [{emqx_conf, combine([conf_log(), TargetConf])}],
-    TargetApps2 = [{emqx_conf, combine([conf_log(), conf_mqtt_listener(31883), TargetConf])}],
+    TargetApps1 = [{emqx_conf, combine([conf_log(), conf_mqtt_listener(31883), TargetConf])}],
+    TargetApps2 = [{emqx_conf, combine([conf_log(), TargetConf])}],
     emqx_cth_cluster:mk_nodespecs(
         [
             {mk_nodename(BaseName, t1), #{apps => TargetApps1, base_port => 20100}},
@@ -201,22 +203,28 @@ t_target_extrouting_gc(Config) ->
     TargetC2 = start_client_unlink("t_target_extrouting_gc2", TargetNode2),
     IsShared = ?config(is_shared_sub, Config),
 
-    {ok, _, _} = emqtt:subscribe(TargetC1, maybe_shared_topic(IsShared, <<"t/#">>), qos1),
-    {ok, _, _} = emqtt:subscribe(TargetC2, maybe_shared_topic(IsShared, <<"t/+">>), qos1),
+    TopicFilter1 = <<"t/+">>,
+    TopicFilter2 = <<"t/#">>,
+    {ok, _, _} = emqtt:subscribe(TargetC1, maybe_shared_topic(IsShared, TopicFilter1), qos1),
+    {ok, _, _} = emqtt:subscribe(TargetC2, maybe_shared_topic(IsShared, TopicFilter2), qos1),
     {ok, _} = ?block_until(#{?snk_kind := clink_route_sync_complete}),
     {ok, _} = emqtt:publish(SourceC1, <<"t/1">>, <<"HELLO1">>, qos1),
     {ok, _} = emqtt:publish(SourceC1, <<"t/2/ext">>, <<"HELLO2">>, qos1),
     {ok, _} = emqtt:publish(SourceC1, <<"t/3/ext">>, <<"HELLO3">>, qos1),
     Pubs1 = [M || {publish, M} <- ?drainMailbox(1_000)],
+    %% We switch off `TargetNode2' first.  Since `TargetNode1' is the sole endpoint
+    %% configured in Target Cluster, the link will keep working (i.e., CL MQTT ecpool
+    %% workers will stay connected).  If we turned `TargetNode1' first, then the link
+    %% would stay down and stop replicating messages.
     {ok, _} = ?wait_async_action(
-        emqx_cth_cluster:stop_node(TargetNode1),
+        emqx_cth_cluster:stop_node(TargetNode2),
         #{?snk_kind := clink_extrouter_actor_cleaned, cluster := <<"cl.target">>}
     ),
     {ok, _} = emqtt:publish(SourceC1, <<"t/4/ext">>, <<"HELLO4">>, qos1),
     {ok, _} = emqtt:publish(SourceC1, <<"t/5">>, <<"HELLO5">>, qos1),
     Pubs2 = [M || {publish, M} <- ?drainMailbox(1_000)],
     {ok, _} = ?wait_async_action(
-        emqx_cth_cluster:stop_node(TargetNode2),
+        emqx_cth_cluster:stop_node(TargetNode1),
         #{?snk_kind := clink_extrouter_actor_cleaned, cluster := <<"cl.target">>}
     ),
     ok = emqtt:stop(SourceC1),
@@ -236,6 +244,9 @@ t_target_extrouting_gc(Config) ->
             #{topic := <<"t/1">>, payload := <<"HELLO1">>, client_pid := _C2},
             #{topic := <<"t/2/ext">>, payload := <<"HELLO2">>},
             #{topic := <<"t/3/ext">>, payload := <<"HELLO3">>},
+            %% We expect only `HELLO5' and not `HELLO4' to be here because the former was
+            %% published while only `TargetNode1' was alive, and this node held only the
+            %% `t/+' subscription at that time.
             #{topic := <<"t/5">>, payload := <<"HELLO5">>}
         ],
         lists:sort(emqx_utils_maps:key_comparer(topic), Pubs1 ++ Pubs2)
@@ -253,6 +264,52 @@ t_target_extrouting_gc(Config) ->
         Trace
     ).
 
+%% Checks that, if an exception occurs while handling a route op message, we disconnect
+%% the upstream agent client so it restarts.
+t_disconnect_on_errors('init', Config) ->
+    SourceNodes = emqx_cth_cluster:start(mk_source_cluster(?FUNCTION_NAME, Config)),
+    [TargetNodeSpec | _] = mk_target_cluster(?FUNCTION_NAME, Config),
+    TargetNodes = emqx_cth_cluster:start([TargetNodeSpec]),
+    _Apps = start_cluster_link(SourceNodes ++ TargetNodes, Config),
+    ok = snabbkaffe:start_trace(),
+    [
+        {source_nodes, SourceNodes},
+        {target_nodes, TargetNodes}
+        | Config
+    ];
+t_disconnect_on_errors('end', Config) ->
+    ok = snabbkaffe:stop(),
+    ok = emqx_cth_cluster:stop(?config(source_nodes, Config)),
+    ok = emqx_cth_cluster:stop(?config(target_nodes, Config)).
+t_disconnect_on_errors(Config) ->
+    ct:timetrap({seconds, 20}),
+    [SN1 | _] = nodes_source(Config),
+    [TargetNode] = nodes_target(Config),
+    SC1 = start_client("t_disconnect_on_errors", SN1),
+    ok = ?ON(SN1, meck:new(emqx_cluster_link, [passthrough, no_link, no_history])),
+    ?assertMatch(
+        {_, {ok, _}},
+        ?wait_async_action(
+            begin
+                ok = ?ON(
+                    TargetNode,
+                    meck:expect(
+                        emqx_cluster_link,
+                        do_handle_route_op_msg,
+                        fun(_Msg) ->
+                            meck:exception(error, {unexpected, error})
+                        end
+                    )
+                ),
+                emqtt:subscribe(SC1, <<"t/u/v">>, 1)
+            end,
+            #{?snk_kind := "cluster_link_connection_failed"}
+        )
+    ),
+    _ = ?ON(TargetNode, meck:unload()),
+    ok = emqtt:stop(SC1),
+    ok.
+
 %%
 
 maybe_shared_topic(true = _IsShared, Topic) ->

+ 114 - 23
apps/emqx_cluster_link/test/emqx_cluster_link_api_SUITE.erl

@@ -10,6 +10,7 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
+-include_lib("emqx/include/asserts.hrl").
 
 -import(emqx_common_test_helpers, [on_exit/1]).
 
@@ -60,7 +61,8 @@ groups() ->
 cluster_test_cases() ->
     [
         t_status,
-        t_metrics
+        t_metrics,
+        t_disable_reenable
     ].
 
 init_per_suite(Config) ->
@@ -202,6 +204,50 @@ link_params(Overrides) ->
     },
     emqx_utils_maps:deep_merge(Default, Overrides).
 
+remove_api_virtual_fields(Response) ->
+    maps:without([<<"name">>, <<"node_status">>, <<"status">>], Response).
+
+%% Node
+disable_and_force_gc(TargetOrSource, Name, Params, TCConfig, Opts) ->
+    NExpectedDeletions = maps:get(expected_num_route_deletions, Opts),
+    {ok, SRef} = snabbkaffe:subscribe(
+        ?match_event(#{?snk_kind := "cluster_link_extrouter_route_deleted"}),
+        NExpectedDeletions,
+        infinity
+    ),
+    Nodes =
+        case TargetOrSource of
+            target -> ?config(source_nodes, TCConfig);
+            source -> ?config(target_nodes, TCConfig)
+        end,
+    {200, _} = update_link(TargetOrSource, Name, Params#{<<"enable">> := false}),
+    %% Note that only when the GC runs and collects the stopped actor it'll actually
+    %% remove the routes
+    NowMS = erlang:system_time(millisecond),
+    TTL = emqx_cluster_link_config:actor_ttl(),
+    ct:pal("gc"),
+    Timestamp = NowMS + TTL * 3,
+    lists:foreach(fun(N) -> ok = do_actor_gc(N, Timestamp) end, Nodes),
+    {ok, _} = snabbkaffe:receive_events(SRef),
+    ct:pal("gc done"),
+    ok.
+
+do_actor_gc(Node, Timestamp) ->
+    %% 2 Actors: one for normal routes, one for PS routes
+    ?ON(Node, emqx_cluster_link_extrouter_gc:force(Timestamp)).
+
+wait_for_routes([Node | Nodes], ExpectedTopics) ->
+    Topics = ?ON(Node, emqx_cluster_link_extrouter:topics()),
+    case lists:sort(ExpectedTopics) == lists:sort(Topics) of
+        true ->
+            wait_for_routes(Nodes, ExpectedTopics);
+        false ->
+            timer:sleep(100),
+            wait_for_routes([Node | Nodes], ExpectedTopics)
+    end;
+wait_for_routes([], _ExpectedTopics) ->
+    ok.
+
 %%------------------------------------------------------------------------------
 %% Test cases
 %%------------------------------------------------------------------------------
@@ -704,25 +750,10 @@ t_metrics(Config) ->
     %% Disabling the link should remove the routes.
     ct:pal("disabling"),
     {200, TargetLink0} = get_link(target, TargetName),
-    TargetLink1 = maps:without([<<"status">>, <<"node_status">>], TargetLink0),
-    TargetLink2 = TargetLink1#{<<"enable">> := false},
-    {_, {ok, _}} =
-        ?wait_async_action(
-            begin
-                {200, _} = update_link(target, TargetName, TargetLink2),
-                %% Note that only when the GC runs and collects the stopped actor it'll actually
-                %% remove the routes
-                NowMS = erlang:system_time(millisecond),
-                TTL = emqx_cluster_link_config:actor_ttl(),
-                ct:pal("gc"),
-                %% 2 Actors: one for normal routes, one for PS routes
-                1 = ?ON(SN1, emqx_cluster_link_extrouter:actor_gc(#{timestamp => NowMS + TTL * 3})),
-                1 = ?ON(SN1, emqx_cluster_link_extrouter:actor_gc(#{timestamp => NowMS + TTL * 3})),
-                ct:pal("gc done"),
-                ok
-            end,
-            #{?snk_kind := "cluster_link_extrouter_route_deleted"}
-        ),
+    TargetLink1 = remove_api_virtual_fields(TargetLink0),
+    ok = disable_and_force_gc(target, TargetName, TargetLink1, Config, #{
+        expected_num_route_deletions => 1
+    }),
 
     ?retry(
         300,
@@ -737,11 +768,11 @@ t_metrics(Config) ->
     ),
 
     %% Enabling again
-    TargetLink3 = TargetLink2#{<<"enable">> := true},
+    TargetLink2 = TargetLink1#{<<"enable">> := true},
     {_, {ok, _}} =
         ?wait_async_action(
             begin
-                {200, _} = update_link(target, TargetName, TargetLink3)
+                {200, _} = update_link(target, TargetName, TargetLink2)
             end,
             #{?snk_kind := "cluster_link_extrouter_route_added"}
         ),
@@ -799,7 +830,7 @@ t_update_password(_Config) ->
             {201, Response1} = create_link(Name, Params1),
             [#{name := Name, password := WrappedPassword0}] = emqx_config:get([cluster, links]),
             ?assertEqual(Password, emqx_secret:unwrap(WrappedPassword0)),
-            Params2A = maps:without([<<"name">>, <<"node_status">>, <<"status">>], Response1),
+            Params2A = remove_api_virtual_fields(Response1),
             Params2 = Params2A#{<<"pool_size">> := 2},
             ?assertEqual(?REDACTED, maps:get(<<"password">>, Params2)),
             ?assertMatch({200, _}, update_link(Name, Params2)),
@@ -842,3 +873,63 @@ t_optional_fields_update(_Config) ->
     {201, _} = create_link(Name, Params0),
     ?assertMatch({200, _}, update_link(Name, Params0)),
     ok.
+
+%% Verifies that, if we disable a link and then re-enable it, it should keep working.
+t_disable_reenable(Config) ->
+    ct:timetrap({seconds, 20}),
+    [SN1, _SN2] = SourceNodes = ?config(source_nodes, Config),
+    [TN1, TN2] = ?config(target_nodes, Config),
+    SourceName = <<"cl.target">>,
+    SourceC1 = emqx_cluster_link_SUITE:start_client(<<"sc1">>, SN1),
+    TargetC1 = emqx_cluster_link_SUITE:start_client(<<"tc1">>, TN1),
+    TargetC2 = emqx_cluster_link_SUITE:start_client(<<"tc2">>, TN2),
+    Topic1 = <<"t/tc1">>,
+    Topic2 = <<"t/tc2">>,
+    {ok, _, _} = emqtt:subscribe(TargetC1, Topic1),
+    {ok, _, _} = emqtt:subscribe(TargetC2, Topic2),
+    %% FIXME: use a snabbkaffe subscription instead of a bare ?block_until
+    ?block_until(#{?snk_kind := clink_route_sync_complete}),
+    {ok, _} = emqtt:publish(SourceC1, Topic1, <<"1">>, [{qos, 1}]),
+    {ok, _} = emqtt:publish(SourceC1, Topic2, <<"2">>, [{qos, 1}]),
+    %% Sanity check: link is working, initially.
+    ?assertReceive({publish, #{topic := Topic1, payload := <<"1">>}}),
+    ?assertReceive({publish, #{topic := Topic2, payload := <<"2">>}}),
+
+    %% Now we simply disable and then re-enable the link in the source cluster.
+    {200, #{<<"enable">> := true} = SourceLink0} = get_link(source, SourceName),
+    SourceLink1 = remove_api_virtual_fields(SourceLink0),
+    %% We force a GC to simulate that the link was left disabled long enough for the
+    %% GC to kick in.
+    ?assertMatch(
+        {200, #{<<"enable">> := false}},
+        update_link(source, SourceName, SourceLink1#{<<"enable">> := false})
+    ),
+    %% In the original issue, the GC deleted the target cluster's agent state in the
+    %% source cluster.  After the fix, there is no longer a GC, so we ignore timeouts here.
+    _ = ?block_until(
+        #{?snk_kind := "clink_extrouter_gc_ran", result := NumDeleted} when
+            NumDeleted > 0,
+        emqx_cluster_link_config:actor_ttl() + 1_000
+    ),
+    ?assertMatch(
+        {200, #{<<"enable">> := true}},
+        update_link(source, SourceName, SourceLink1)
+    ),
+
+    Topic3 = <<"t/tc3">>,
+    Topic4 = <<"t/tc4">>,
+    {ok, _, _} = emqtt:subscribe(TargetC1, Topic3),
+    {ok, _, _} = emqtt:subscribe(TargetC2, Topic4),
+    ct:pal("waiting for routes to be synced..."),
+    ExpectedTopics = [Topic1, Topic2, Topic3, Topic4],
+    wait_for_routes(SourceNodes, ExpectedTopics),
+
+    {ok, _} = emqtt:publish(SourceC1, Topic1, <<"3">>, [{qos, 1}]),
+    {ok, _} = emqtt:publish(SourceC1, Topic2, <<"4">>, [{qos, 1}]),
+    {ok, _} = emqtt:publish(SourceC1, Topic3, <<"5">>, [{qos, 1}]),
+    {ok, _} = emqtt:publish(SourceC1, Topic4, <<"6">>, [{qos, 1}]),
+    ?assertReceive({publish, #{topic := Topic1, payload := <<"3">>}}),
+    ?assertReceive({publish, #{topic := Topic2, payload := <<"4">>}}),
+    ?assertReceive({publish, #{topic := Topic3, payload := <<"5">>}}),
+    ?assertReceive({publish, #{topic := Topic4, payload := <<"6">>}}),
+    ok.

+ 1 - 1
apps/emqx_conf/src/emqx_conf.app.src

@@ -1,6 +1,6 @@
 {application, emqx_conf, [
     {description, "EMQX configuration management"},
-    {vsn, "0.4.0"},
+    {vsn, "0.4.1"},
     {registered, []},
     {mod, {emqx_conf_app, []}},
     {applications, [kernel, stdlib]},

+ 5 - 3
apps/emqx_conf/src/emqx_conf_cli.erl

@@ -284,6 +284,8 @@ print(Json) ->
 
 print_hocon(Hocon) when is_map(Hocon) ->
     emqx_ctl:print("~ts~n", [hocon_pp:do(Hocon, #{})]);
+print_hocon(undefined) ->
+    emqx_ctl:print("No value~n", []);
 print_hocon({error, Error}) ->
     emqx_ctl:warning("~ts~n", [Error]).
 
@@ -811,8 +813,8 @@ print_inconsistent_conf(Keys, Target, Status, AllConfs) ->
         fun(Key) ->
             lists:foreach(
                 fun({Node, OtherConf}) ->
-                    TargetV = maps:get(Key, TargetConf),
-                    PrevV = maps:get(Key, OtherConf),
+                    TargetV = maps:get(Key, TargetConf, undefined),
+                    PrevV = maps:get(Key, OtherConf, undefined),
                     NodeTnxId = get_tnx_id(Node, Status),
                     Options = #{
                         key => Key,
@@ -855,7 +857,7 @@ print_inconsistent_conf(New, Old, Options) ->
         target := {Target, TargetTnxId},
         node := {Node, NodeTnxId}
     } = Options,
-    emqx_ctl:print("~ts(tnx_id=~w)'s ~s is diff from ~ts(tnx_id=~w).~n", [
+    emqx_ctl:print("~ts(tnx_id=~w)'s ~s is different from ~ts(tnx_id=~w).~n", [
         Node, NodeTnxId, Key, Target, TargetTnxId
     ]),
     emqx_ctl:print("~ts:~n", [Node]),

+ 1 - 1
apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src

@@ -1,6 +1,6 @@
 {application, emqx_connector_aggregator, [
     {description, "EMQX Enterprise Connector Data Aggregator"},
-    {vsn, "0.1.3"},
+    {vsn, "0.1.4"},
     {registered, []},
     {applications, [
         kernel,

+ 17 - 11
apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl

@@ -105,7 +105,11 @@ write_records_limited(Name, Buffer = #buffer{max_records = MaxRecords}, Records)
 write_records(Name, Buffer = #buffer{fd = Writer, max_records = MaxRecords}, Records, NumWritten) ->
     case emqx_connector_aggreg_buffer:write(Records, Writer) of
         ok ->
-            ?tp(connector_aggreg_records_written, #{action => Name, records => Records}),
+            ?tp(connector_aggreg_records_written, #{
+                action => Name,
+                records => Records,
+                buffer => Buffer
+            }),
             case is_number(NumWritten) andalso NumWritten >= MaxRecords of
                 true ->
                     rotate_buffer_async(Name, Buffer);
@@ -186,10 +190,10 @@ init(St0 = #st{name = Name}) ->
 
 handle_call({next_buffer, Timestamp}, _From, St0) ->
     St = #st{buffer = Buffer} = handle_next_buffer(Timestamp, St0),
-    {reply, Buffer, St, 0};
+    {reply, Buffer, St};
 handle_call({rotate_buffer, FD}, _From, St0) ->
     St = #st{buffer = Buffer} = handle_rotate_buffer(FD, St0),
-    {reply, Buffer, St, 0};
+    {reply, Buffer, St};
 handle_call(take_error, _From, St0) ->
     {MaybeError, St} = handle_take_error(St0),
     {reply, MaybeError, St}.
@@ -198,12 +202,12 @@ handle_cast({close_buffer, Timestamp}, St) ->
     {noreply, handle_close_buffer(Timestamp, St)};
 handle_cast({rotate_buffer, FD}, St0) ->
     St = handle_rotate_buffer(FD, St0),
-    {noreply, St, 0};
+    {noreply, St};
+handle_cast(enqueue_delivery, St0) ->
+    {noreply, handle_queued_buffer(St0)};
 handle_cast(_Cast, St) ->
     {noreply, St}.
 
-handle_info(timeout, St) ->
-    {noreply, handle_queued_buffer(St)};
 handle_info({'DOWN', MRef, _, Pid, Reason}, St0 = #st{name = Name, deliveries = Ds0}) ->
     case maps:take(MRef, Ds0) of
         {Buffer, Ds} ->
@@ -254,6 +258,7 @@ handle_rotate_buffer(_ClosedFD, St) ->
     St.
 
 enqueue_closed_buffer(Buffer, St = #st{queued = undefined}) ->
+    trigger_enqueue_delivery(),
     St#st{queued = Buffer};
 enqueue_closed_buffer(Buffer, St0) ->
     %% NOTE: Should never really happen unless interval / max records are too tight.
@@ -277,7 +282,7 @@ allocate_buffer(Since, Seq, St = #st{name = Name}) ->
     {ok, FD} = file:open(Filename, [write, binary]),
     Writer = emqx_connector_aggreg_buffer:new_writer(FD, _Meta = []),
     _ = add_counter(Counter),
-    ?tp(connector_aggreg_buffer_allocated, #{action => Name, filename => Filename}),
+    ?tp(connector_aggreg_buffer_allocated, #{action => Name, filename => Filename, buffer => Buffer}),
     Buffer#buffer{fd = Writer}.
 
 recover_buffer(Buffer = #buffer{filename = Filename, cnt_records = Counter}) ->
@@ -386,6 +391,9 @@ lookup_current_buffer(Name) ->
 
 %%
 
+trigger_enqueue_delivery() ->
+    gen_server:cast(self(), enqueue_delivery).
+
 enqueue_delivery(Buffer, St = #st{name = Name, deliveries = Ds}) ->
     case emqx_connector_aggreg_upload_sup:start_delivery(Name, Buffer) of
         {ok, Pid} ->
@@ -398,16 +406,14 @@ enqueue_delivery(Buffer, St = #st{name = Name, deliveries = Ds}) ->
 handle_delivery_exit(Buffer, Normal, St = #st{name = Name}) when
     Normal == normal; Normal == noproc
 ->
-    ?SLOG(debug, #{
-        msg => "aggregated_buffer_delivery_completed",
+    ?tp(debug, "aggregated_buffer_delivery_completed", #{
         action => Name,
         buffer => Buffer#buffer.filename
     }),
     ok = discard_buffer(Buffer),
     St;
 handle_delivery_exit(Buffer, {shutdown, {skipped, Reason}}, St = #st{name = Name}) ->
-    ?SLOG(info, #{
-        msg => "aggregated_buffer_delivery_skipped",
+    ?tp(info, "aggregated_buffer_delivery_skipped", #{
         action => Name,
         buffer => {Buffer#buffer.since, Buffer#buffer.seq},
         reason => Reason

+ 1 - 1
apps/emqx_dashboard/src/emqx_dashboard.app.src

@@ -2,7 +2,7 @@
 {application, emqx_dashboard, [
     {description, "EMQX Web Dashboard"},
     % strict semver, bump manually!
-    {vsn, "5.1.5"},
+    {vsn, "5.1.6"},
     {modules, []},
     {registered, [emqx_dashboard_sup]},
     {applications, [

+ 28 - 24
apps/emqx_dashboard/src/emqx_dashboard_monitor.erl

@@ -75,6 +75,7 @@
 -define(CLEAN_EXPIRED_INTERVAL, 10 * ?MINUTES).
 -define(RETENTION_TIME, 7 * ?DAYS).
 -define(MAX_POSSIBLE_SAMPLES, 1440).
+-define(LOG(LEVEL, DATA), ?SLOG(LEVEL, DATA, #{tag => "DASHBOARD"})).
 
 -record(state, {
     last,
@@ -121,8 +122,8 @@ current_rate(Node) when Node == node() ->
     try
         do_call(current_rate)
     catch
-        _E:R ->
-            ?SLOG(warning, #{msg => "dashboard_monitor_error", reason => R}),
+        _E:R:Stacktrace ->
+            ?LOG(warning, #{msg => "dashboard_monitor_error", reason => R, stacktrace => Stacktrace}),
             %% Rate map 0, ensure api will not crash.
             %% When joining cluster, dashboard monitor restart.
             Rate0 = [
@@ -134,31 +135,28 @@ current_rate(Node) when Node == node() ->
 current_rate(Node) ->
     case emqx_dashboard_proto_v1:current_rate(Node) of
         {badrpc, Reason} ->
-            {badrpc, {Node, Reason}};
+            {badrpc, #{node => Node, reason => Reason}};
         {ok, Rate} ->
             {ok, Rate}
     end.
 
 %% Get the current rate. Not the current sampler data.
 current_rate_cluster() ->
-    Fun =
+    Nodes = mria:cluster_nodes(running),
+    %% each call has 5s timeout, so it's ok to wait infinity here
+    L0 = emqx_utils:pmap(fun(Node) -> current_rate(Node) end, Nodes, infinity),
+    {L1, Failed} = lists:partition(
         fun
-            (Node, Cluster) when is_map(Cluster) ->
-                case current_rate(Node) of
-                    {ok, CurrentRate} ->
-                        merge_cluster_rate(CurrentRate, Cluster);
-                    {badrpc, Reason} ->
-                        {badrpc, {Node, Reason}}
-                end;
-            (_Node, Error) ->
-                Error
+            ({ok, _}) -> true;
+            (_) -> false
         end,
-    case lists:foldl(Fun, #{}, mria:cluster_nodes(running)) of
-        {badrpc, Reason} ->
-            {badrpc, Reason};
-        Metrics ->
-            {ok, adjust_synthetic_cluster_metrics(Metrics)}
-    end.
+        L0
+    ),
+    Failed =/= [] andalso
+        ?LOG(badrpc_log_level(L1), #{msg => "failed_to_sample_current_rate", errors => Failed}),
+    Fun = fun({ok, Result}, Cluster) -> merge_cluster_rate(Result, Cluster) end,
+    Metrics = lists:foldl(Fun, #{}, L1),
+    {ok, adjust_synthetic_cluster_metrics(Metrics)}.
 
 %% -------------------------------------------------------------------------------------------------
 %% gen_server functions
@@ -326,6 +324,10 @@ do_sample_local(Time) ->
     %% downsample before return RPC calls for less data to merge by the caller nodes
     downsample(Time, Map).
 
+%% Log at error level when there are no successes (unlikely to happen), and at warning level otherwise.
+badrpc_log_level([]) -> error;
+badrpc_log_level(_) -> warning.
+
 sample_nodes(Nodes, Time) ->
     ResList = concurrently_sample_nodes(Nodes, Time),
     {Failed, Success} = lists:partition(
@@ -336,15 +338,14 @@ sample_nodes(Nodes, Time) ->
         ResList
     ),
     Failed =/= [] andalso
-        ?SLOG(warning, #{msg => "failed_to_sample_monitor_data", errors => Failed}),
+        ?LOG(badrpc_log_level(Success), #{msg => "failed_to_sample_monitor_data", errors => Failed}),
     lists:foldl(fun(I, B) -> merge_samplers(Time, I, B) end, #{}, Success).
 
 concurrently_sample_nodes(Nodes, Time) ->
     %% emqx_dashboard_proto_v1:do_sample has a timeout (5s),
-    Timeout = ?RPC_TIMEOUT + ?ONE_SECOND,
     %% call emqx_utils:pmap here instead of a rpc multicall
     %% to avoid having to introduce a new bpapi proto version
-    emqx_utils:pmap(fun(Node) -> do_sample(Node, Time) end, Nodes, Timeout).
+    emqx_utils:pmap(fun(Node) -> do_sample(Node, Time) end, Nodes, infinity).
 
 merge_samplers(SinceTime, Increment0, Base) ->
     Increment =
@@ -489,10 +490,13 @@ sample_interval(_Age) ->
     10 * ?MINUTES.
 
 sample_fill_gap(Node, SinceTs) ->
-    Samples = do_sample(Node, SinceTs),
+    %% make a remote call so it can be mocked for testing
+    Samples = ?MODULE:do_sample(Node, SinceTs),
     fill_gaps(Samples, SinceTs).
 
-fill_gaps(Samples, SinceTs) ->
+fill_gaps({badrpc, _} = BadRpc, _) ->
+    BadRpc;
+fill_gaps(Samples, SinceTs) when is_map(Samples) ->
     TsList = lists:sort(maps:keys(Samples)),
     case length(TsList) >= 2 of
         true ->

+ 19 - 0
apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl

@@ -315,6 +315,25 @@ t_monitor_sampler_format(_Config) ->
     [?assert(lists:member(SamplerName, SamplerKeys)) || SamplerName <- ?SAMPLER_LIST],
     ok.
 
+t_sample_specific_node_but_badrpc(_Config) ->
+    meck:new(emqx_dashboard_monitor, [non_strict, passthrough, no_history, no_link]),
+    meck:expect(
+        emqx_dashboard_monitor,
+        do_sample,
+        fun(_Node, _Time) -> {badrpc, test} end
+    ),
+    ?assertMatch(
+        {error, {404, #{<<"code">> := <<"NOT_FOUND">>}}},
+        request(["monitor", "nodes", "a@b.net"], "latest=1000")
+    ),
+    %% arguably, it should be a 503
+    ?assertMatch(
+        {error, {400, #{<<"code">> := <<"BAD_REQUEST">>}}},
+        request(["monitor", "nodes", atom_to_list(node())], "latest=1000")
+    ),
+    meck:unload(emqx_dashboard_monitor),
+    ok.
+
 t_handle_old_monitor_data(_Config) ->
     Now = erlang:system_time(second),
     FakeOldData = maps:from_list(

+ 1 - 1
apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src

@@ -1,6 +1,6 @@
 {application, emqx_dashboard_sso, [
     {description, "EMQX Dashboard Single Sign-On"},
-    {vsn, "0.1.7"},
+    {vsn, "0.1.8"},
     {registered, [emqx_dashboard_sso_sup]},
     {applications, [
         kernel,

+ 4 - 4
apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_session.erl

@@ -46,7 +46,7 @@ start_link(Cfg) ->
 
 start(Name, #{issuer := Issuer, session_expiry := SessionExpiry0}) ->
     case
-        emqx_dashboard_sso_sup:start_child(
+        emqx_dashboard_sso_oidc_sup:start_child(
             oidcc_provider_configuration_worker,
             [
                 #{
@@ -63,12 +63,12 @@ start(Name, #{issuer := Issuer, session_expiry := SessionExpiry0}) ->
             Error;
         _ ->
             SessionExpiry = timer:seconds(SessionExpiry0),
-            emqx_dashboard_sso_sup:start_child(?MODULE, [SessionExpiry])
+            emqx_dashboard_sso_oidc_sup:start_child(?MODULE, [SessionExpiry])
     end.
 
 stop() ->
-    _ = emqx_dashboard_sso_sup:stop_child(oidcc_provider_configuration_worker),
-    _ = emqx_dashboard_sso_sup:stop_child(?MODULE),
+    _ = emqx_dashboard_sso_oidc_sup:stop_child(oidcc_provider_configuration_worker),
+    _ = emqx_dashboard_sso_oidc_sup:stop_child(?MODULE),
     ok.
 
 new(Data) ->

+ 27 - 0
apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_sup.erl

@@ -0,0 +1,27 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_dashboard_sso_oidc_sup).
+
+-behaviour(supervisor).
+
+-export([start_link/0, start_child/2, stop_child/1]).
+
+-export([init/1]).
+
+-define(CHILD(I, Args, Restart), {I, {I, start_link, Args}, Restart, 5000, worker, [I]}).
+
+start_link() ->
+    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+start_child(Mod, Args) ->
+    supervisor:start_child(?MODULE, ?CHILD(Mod, Args, transient)).
+
+stop_child(Mod) ->
+    _ = supervisor:terminate_child(?MODULE, Mod),
+    _ = supervisor:delete_child(?MODULE, Mod),
+    ok.
+
+init([]) ->
+    {ok, {{one_for_one, 0, 1}, []}}.

+ 24 - 14
apps/emqx_dashboard_sso/src/emqx_dashboard_sso_sup.erl

@@ -6,26 +6,36 @@
 
 -behaviour(supervisor).
 
--export([start_link/0, start_child/2, stop_child/1]).
+-export([start_link/0]).
 
 -export([init/1]).
 
--define(CHILD(I, Args, Restart), {I, {I, start_link, Args}, Restart, 5000, worker, [I]}).
--define(CHILD(I), ?CHILD(I, [], permanent)).
-
 start_link() ->
     supervisor:start_link({local, ?MODULE}, ?MODULE, []).
 
-start_child(Mod, Args) ->
-    supervisor:start_child(?MODULE, ?CHILD(Mod, Args, transient)).
-
-stop_child(Mod) ->
-    _ = supervisor:terminate_child(?MODULE, Mod),
-    _ = supervisor:delete_child(?MODULE, Mod),
-    ok.
-
 init([]) ->
     {ok,
-        {{one_for_one, 5, 100}, [
-            ?CHILD(emqx_dashboard_sso_manager)
+        {{one_for_one, 10, 100}, [
+            sup_spec(emqx_dashboard_sso_oidc_sup),
+            child_spec(emqx_dashboard_sso_manager, permanent)
         ]}}.
+
+sup_spec(Mod) ->
+    #{
+        id => Mod,
+        start => {Mod, start_link, []},
+        restart => permanent,
+        shutdown => infinity,
+        type => supervisor,
+        modules => [Mod]
+    }.
+
+child_spec(Mod, Restart) ->
+    #{
+        id => Mod,
+        start => {Mod, start_link, []},
+        restart => Restart,
+        shutdown => 15000,
+        type => worker,
+        modules => [Mod]
+    }.

+ 39 - 23
apps/emqx_ds_shared_sub/src/emqx_ds_shared_sub_leader.erl

@@ -302,8 +302,9 @@ renew_streams(#{topic := Topic} = Data0) ->
     {Data1, VanishedStreams} = update_progresses(Data0, NewStreamsWRanks, TopicFilter, StartTime),
     Data2 = store_put_rank_progress(Data1, RankProgress),
     Data3 = removed_vanished_streams(Data2, VanishedStreams),
-    Data4 = revoke_streams(Data3),
-    Data5 = assign_streams(Data4),
+    DesiredCounts = desired_stream_count_for_agents(Data3),
+    Data4 = revoke_streams(Data3, DesiredCounts),
+    Data5 = assign_streams(Data4, DesiredCounts),
     ?SLOG(info, #{
         msg => leader_renew_streams,
         topic_filter => TopicFilter,
@@ -370,12 +371,12 @@ removed_vanished_streams(Data0, VanishedStreams) ->
 %% We revoke only from replaying agents.
 %% After revoking, no unassigned streams appear. Streams will become unassigned
 %% only after agents report them as acked and unsubscribed.
-revoke_streams(Data0) ->
-    DesiredStreamsPerAgent = desired_stream_count_per_agent(Data0),
+revoke_streams(Data0, DesiredCounts) ->
     Agents = replaying_agents(Data0),
     lists:foldl(
         fun(Agent, DataAcc) ->
-            revoke_excess_streams_from_agent(DataAcc, Agent, DesiredStreamsPerAgent)
+            DesiredCount = maps:get(Agent, DesiredCounts),
+            revoke_excess_streams_from_agent(DataAcc, Agent, DesiredCount)
         end,
         Data0,
         Agents
@@ -424,12 +425,12 @@ select_streams_for_revoke(
 
 %% We assign streams to agents that have too few streams (< desired_stream_count_per_agent).
 %% We assign only to replaying agents.
-assign_streams(Data0) ->
-    DesiredStreamsPerAgent = desired_stream_count_per_agent(Data0),
+assign_streams(Data0, DesiredCounts) ->
     Agents = replaying_agents(Data0),
     lists:foldl(
         fun(Agent, DataAcc) ->
-            assign_lacking_streams(DataAcc, Agent, DesiredStreamsPerAgent)
+            DesiredCount = maps:get(Agent, DesiredCounts),
+            assign_lacking_streams(DataAcc, Agent, DesiredCount)
         end,
         Data0,
         Agents
@@ -549,7 +550,8 @@ connect_agent(
             }),
             reconnect_agent(Data, Agent, AgentMetadata, AgentState);
         _ ->
-            DesiredCount = desired_stream_count_for_new_agent(Data),
+            DesiredCounts = desired_stream_count_for_agents(Data, [Agent | maps:keys(Agents)]),
+            DesiredCount = maps:get(Agent, DesiredCounts),
             assign_initial_streams_to_agent(Data, Agent, AgentMetadata, DesiredCount)
     end.
 
@@ -947,26 +949,40 @@ replaying_agents(#{agents := AgentStates}) ->
         maps:to_list(AgentStates)
     ).
 
-desired_stream_count_per_agent(#{agents := AgentStates} = Data) ->
-    desired_stream_count_per_agent(Data, maps:size(AgentStates)).
+desired_stream_count_for_agents(#{agents := AgentStates} = Data) ->
+    desired_stream_count_for_agents(Data, maps:keys(AgentStates)).
 
-desired_stream_count_for_new_agent(#{agents := AgentStates} = Data) ->
-    desired_stream_count_per_agent(Data, maps:size(AgentStates) + 1).
-
-desired_stream_count_per_agent(Data, AgentCount) ->
-    case AgentCount of
-        0 ->
+desired_stream_count_for_agents(Data, Agents) ->
+    case Agents of
+        [] ->
             0;
         _ ->
             StreamCount = store_num_streams(Data),
-            case StreamCount rem AgentCount of
-                0 ->
-                    StreamCount div AgentCount;
-                _ ->
-                    1 + StreamCount div AgentCount
-            end
+            AgentCount = length(Agents),
+            maps:from_list(
+                lists:map(
+                    fun({I, Agent}) ->
+                        {Agent, desired_stream_count_for_agent(StreamCount, AgentCount, I)}
+                    end,
+                    enumerate(lists:sort(Agents))
+                )
+            )
     end.
 
+enumerate(List) ->
+    enumerate(0, List).
+
+enumerate(_, []) ->
+    [];
+enumerate(I, [H | T]) ->
+    [{I, H} | enumerate(I + 1, T)].
+
+desired_stream_count_for_agent(StreamCount, AgentCount, I) ->
+    (StreamCount div AgentCount) + extra_stream_count_for_agent(StreamCount, AgentCount, I).
+
+extra_stream_count_for_agent(StreamCount, AgentCount, I) when I < (StreamCount rem AgentCount) -> 1;
+extra_stream_count_for_agent(_StreamCount, _AgentCount, _I) -> 0.
+
 stream_progresses(Data, Streams) ->
     lists:map(
         fun(Stream) ->

+ 1 - 1
apps/emqx_gateway/src/emqx_gateway.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_gateway, [
     {description, "The Gateway management application"},
-    {vsn, "0.2.1"},
+    {vsn, "0.2.2"},
     {registered, []},
     {mod, {emqx_gateway_app, []}},
     {applications, [

+ 1 - 1
apps/emqx_gateway/src/emqx_gateway_api.erl

@@ -710,7 +710,7 @@ examples_gateway_confs() ->
                         enable_stats => true,
                         idle_timeout => <<"30s">>,
                         mountpoint => <<"lwm2m/">>,
-                        xml_dir => <<"/etc/emqx/lwm2m_xml">>,
+                        xml_dir => <<"etc/lwm2m_xml">>,
                         lifetime_min => <<"1s">>,
                         lifetime_max => <<"86400s">>,
                         qmode_time_window => <<"22s">>,

+ 20 - 37
apps/emqx_gateway/src/emqx_gateway_api_authn_user_import.erl

@@ -67,28 +67,12 @@ import_users(post, #{
     bindings := #{name := Name0},
     body := Body
 }) ->
-    with_authn(Name0, fun(
-        _GwName,
-        #{
-            id := AuthId,
-            chain_name := ChainName
-        }
-    ) ->
-        case maps:get(<<"filename">>, Body, undefined) of
-            undefined ->
-                emqx_authn_api:serialize_error({missing_parameter, filename});
-            File ->
-                [{FileName, FileData}] = maps:to_list(maps:without([type], File)),
-                case
-                    emqx_authn_chains:import_users(
-                        ChainName, AuthId, {hash, FileName, FileData}
-                    )
-                of
-                    ok -> {204};
-                    {error, Reason} -> emqx_authn_api:serialize_error(Reason)
-                end
+    with_authn(
+        Name0,
+        fun(_GwName, #{id := AuthId, chain_name := ChainName}) ->
+            do_import_users(ChainName, AuthId, Body)
         end
-    end).
+    ).
 
 import_listener_users(post, #{
     bindings := #{name := Name0, id := Id},
@@ -98,23 +82,22 @@ import_listener_users(post, #{
         Name0,
         Id,
         fun(_GwName, #{id := AuthId, chain_name := ChainName}) ->
-            case maps:get(<<"filename">>, Body, undefined) of
-                undefined ->
-                    emqx_authn_api:serialize_error({missing_parameter, filename});
-                File ->
-                    [{FileName, FileData}] = maps:to_list(maps:without([type], File)),
-                    case
-                        emqx_authn_chains:import_users(
-                            ChainName, AuthId, {hash, FileName, FileData}
-                        )
-                    of
-                        ok -> {204};
-                        {error, Reason} -> emqx_authn_api:serialize_error(Reason)
-                    end
-            end
+            do_import_users(ChainName, AuthId, Body)
         end
     ).
 
+do_import_users(ChainName, AuthId, HttpBody) ->
+    case maps:get(<<"filename">>, HttpBody, undefined) of
+        undefined ->
+            emqx_authn_api:serialize_error({missing_parameter, filename});
+        File ->
+            [{FileName, FileData}] = maps:to_list(maps:without([type], File)),
+            case emqx_authn_chains:import_users(ChainName, AuthId, {hash, FileName, FileData}) of
+                {ok, Result} -> {200, Result};
+                {error, Reason} -> emqx_authn_api:serialize_error(Reason)
+            end
+    end.
+
 %%--------------------------------------------------------------------
 %% Swagger defines
 %%--------------------------------------------------------------------
@@ -130,7 +113,7 @@ schema("/gateways/:name/authentication/import_users") ->
                 parameters => params_gateway_name_in_path(),
                 'requestBody' => emqx_dashboard_swagger:file_schema(filename),
                 responses =>
-                    ?STANDARD_RESP(#{204 => <<"Imported">>})
+                    ?STANDARD_RESP(#{200 => emqx_authn_user_import_api:import_result_schema()})
             }
     };
 schema("/gateways/:name/listeners/:id/authentication/import_users") ->
@@ -145,7 +128,7 @@ schema("/gateways/:name/listeners/:id/authentication/import_users") ->
                     params_listener_id_in_path(),
                 'requestBody' => emqx_dashboard_swagger:file_schema(filename),
                 responses =>
-                    ?STANDARD_RESP(#{204 => <<"Imported">>})
+                    ?STANDARD_RESP(#{200 => emqx_authn_user_import_api:import_result_schema()})
             }
     }.
 

+ 36 - 12
apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl

@@ -379,15 +379,27 @@ t_authn_data_mgmt(_) ->
     Dir = code:lib_dir(emqx_auth, test),
     JSONFileName = filename:join([Dir, <<"data/user-credentials.json">>]),
     {ok, JSONData} = file:read_file(JSONFileName),
-    {ok, 204, _} = emqx_dashboard_api_test_helpers:multipart_formdata_request(ImportUri, [], [
-        {filename, "user-credentials.json", JSONData}
-    ]),
+    {ok, 200, ImportedResults} = emqx_dashboard_api_test_helpers:multipart_formdata_request(
+        ImportUri, [], [
+            {filename, "user-credentials.json", JSONData}
+        ]
+    ),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2},
+        emqx_utils_json:decode(ImportedResults, [return_maps])
+    ),
 
     CSVFileName = filename:join([Dir, <<"data/user-credentials.csv">>]),
     {ok, CSVData} = file:read_file(CSVFileName),
-    {ok, 204, _} = emqx_dashboard_api_test_helpers:multipart_formdata_request(ImportUri, [], [
-        {filename, "user-credentials.csv", CSVData}
-    ]),
+    {ok, 200, ImportedResults2} = emqx_dashboard_api_test_helpers:multipart_formdata_request(
+        ImportUri, [], [
+            {filename, "user-credentials.csv", CSVData}
+        ]
+    ),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2},
+        emqx_utils_json:decode(ImportedResults2, [return_maps])
+    ),
 
     {204, _} = request(delete, "/gateways/stomp/authentication"),
     {204, _} = request(get, "/gateways/stomp/authentication"),
@@ -584,15 +596,27 @@ t_listeners_authn_data_mgmt(_) ->
     Dir = code:lib_dir(emqx_auth, test),
     JSONFileName = filename:join([Dir, <<"data/user-credentials.json">>]),
     {ok, JSONData} = file:read_file(JSONFileName),
-    {ok, 204, _} = emqx_dashboard_api_test_helpers:multipart_formdata_request(ImportUri, [], [
-        {filename, "user-credentials.json", JSONData}
-    ]),
+    {ok, 200, ImportedResults} = emqx_dashboard_api_test_helpers:multipart_formdata_request(
+        ImportUri, [], [
+            {filename, "user-credentials.json", JSONData}
+        ]
+    ),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2},
+        emqx_utils_json:decode(ImportedResults, [return_maps])
+    ),
 
     CSVFileName = filename:join([Dir, <<"data/user-credentials.csv">>]),
     {ok, CSVData} = file:read_file(CSVFileName),
-    {ok, 204, _} = emqx_dashboard_api_test_helpers:multipart_formdata_request(ImportUri, [], [
-        {filename, "user-credentials.csv", CSVData}
-    ]),
+    {ok, 200, ImportedResults2} = emqx_dashboard_api_test_helpers:multipart_formdata_request(
+        ImportUri, [], [
+            {filename, "user-credentials.csv", CSVData}
+        ]
+    ),
+    ?assertMatch(
+        #{<<"total">> := 2, <<"success">> := 2},
+        emqx_utils_json:decode(ImportedResults2, [return_maps])
+    ),
 
     ok.
 

+ 1 - 1
apps/emqx_gateway_ocpp/rebar.config

@@ -1,7 +1,7 @@
 %% -*- mode: erlang; -*-
 
 {deps, [
-    {jesse, {git, "https://github.com/emqx/jesse.git", {tag, "1.8.0.1"}}},
+    {jesse, {git, "https://github.com/emqx/jesse.git", {tag, "1.8.1.1"}}},
     {emqx, {path, "../../apps/emqx"}},
     {emqx_utils, {path, "../emqx_utils"}},
     {emqx_gateway, {path, "../../apps/emqx_gateway"}}

+ 2 - 0
apps/emqx_machine/src/emqx_machine.erl

@@ -241,6 +241,8 @@ mria_lb_custom_info() ->
     get_emqx_vsn().
 
 %% Note: this function is stored in the Mria's application environment
+%% This function is only evaluated by replicant nodes.
+%% Should return `true' if the current node is allowed to connect to a node running the given version.
 mria_lb_custom_info_check(undefined) ->
     false;
 mria_lb_custom_info_check(OtherVsn) ->

+ 0 - 0
apps/emqx_machine/src/emqx_machine_replicant_health_probe.erl


Някои файлове не бяха показани, защото твърде много файлове са промени