Kaynağa Gözat

Merge remote-tracking branch 'upstream/release-58' into 20240930-sync-release-58

Ivan Dyachkov 1 yıl önce
ebeveyn
işleme
b5b7a492db
100 değiştirilmiş dosya ile 2191 ekleme ve 548 silme
  1. 1 1
      .ci/docker-compose-file/.env
  2. 0 2
      .ci/docker-compose-file/docker-compose-sqlserver.yaml
  3. 0 2
      .ci/docker-compose-file/docker-compose-toxiproxy.yaml
  4. 0 2
      .ci/docker-compose-file/docker-compose.yaml
  5. 5 5
      .ci/docker-compose-file/odbc/odbcinst.ini
  6. 12 3
      .ci/docker-compose-file/openldap/Dockerfile
  7. 14 0
      .ci/docker-compose-file/openldap/entrypoint
  8. 2 1
      .ci/docker-compose-file/openldap/slapd.conf
  9. 4 2
      .github/workflows/_push-entrypoint.yaml
  10. 14 1
      .github/workflows/build_and_push_docker_images.yaml
  11. 1 1
      .github/workflows/run_emqx_app_tests.yaml
  12. 5 1
      Makefile
  13. 1 1
      apps/emqx/include/emqx.hrl
  14. 2 2
      apps/emqx/include/emqx_release.hrl
  15. 2 2
      apps/emqx/rebar.config
  16. 2 1
      apps/emqx/src/emqx.erl
  17. 21 15
      apps/emqx/src/emqx_banned.erl
  18. 1 2
      apps/emqx/src/emqx_broker.erl
  19. 2 1
      apps/emqx/src/emqx_broker_sup.erl
  20. 3 3
      apps/emqx/src/emqx_config_handler.erl
  21. 8 36
      apps/emqx/src/emqx_frame.erl
  22. 6 2
      apps/emqx/src/emqx_pool_sup.erl
  23. 18 10
      apps/emqx/src/emqx_schema.erl
  24. 51 1
      apps/emqx/src/emqx_session.erl
  25. 13 0
      apps/emqx/test/emqx_banned_SUITE.erl
  26. 2 2
      apps/emqx_auth/etc/acl.conf
  27. 1 0
      apps/emqx_auth/include/emqx_authn.hrl
  28. 5 1
      apps/emqx_auth/src/emqx_auth_utils.erl
  29. 5 8
      apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl
  30. 1 0
      apps/emqx_auth/src/emqx_authz/emqx_authz.erl
  31. 7 2
      apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl
  32. 23 0
      apps/emqx_auth/test/emqx_authz/emqx_authz_file_SUITE.erl
  33. 94 0
      apps/emqx_auth_cinfo/BSL.txt
  34. 49 0
      apps/emqx_auth_cinfo/README.md
  35. 12 0
      apps/emqx_auth_cinfo/include/emqx_auth_cinfo.hrl
  36. 33 0
      apps/emqx_auth_cinfo/mix.exs
  37. 7 0
      apps/emqx_auth_cinfo/rebar.config
  38. 16 0
      apps/emqx_auth_cinfo/src/emqx_auth_cinfo.app.src
  39. 20 0
      apps/emqx_auth_cinfo/src/emqx_auth_cinfo_app.erl
  40. 25 0
      apps/emqx_auth_cinfo/src/emqx_auth_cinfo_sup.erl
  41. 137 0
      apps/emqx_auth_cinfo/src/emqx_authn_cinfo.erl
  42. 93 0
      apps/emqx_auth_cinfo/src/emqx_authn_cinfo_schema.erl
  43. 170 0
      apps/emqx_auth_cinfo/test/emqx_authn_cinfo_SUITE.erl
  44. 156 0
      apps/emqx_auth_cinfo/test/emqx_authn_cinfo_int_SUITE.erl
  45. 1 3
      apps/emqx_auth_ext/src/emqx_auth_ext.app.src
  46. 1 1
      apps/emqx_auth_kerberos/rebar.config
  47. 28 2
      apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl
  48. 6 1
      apps/emqx_bridge/src/emqx_bridge.erl
  49. 1 1
      apps/emqx_bridge/src/emqx_bridge_app.erl
  50. 1 1
      apps/emqx_bridge/src/emqx_bridge_resource.erl
  51. 27 12
      apps/emqx_bridge/src/emqx_bridge_v2.erl
  52. 16 14
      apps/emqx_bridge/src/emqx_bridge_v2_api.erl
  53. 2 2
      apps/emqx_bridge/src/schema/emqx_bridge_schema.erl
  54. 52 0
      apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl
  55. 16 11
      apps/emqx_bridge_azure_blob_storage/src/emqx_bridge_azure_blob_storage_connector.erl
  56. 23 5
      apps/emqx_bridge_azure_blob_storage/src/emqx_bridge_azure_blob_storage_connector_schema.erl
  57. 28 2
      apps/emqx_bridge_azure_blob_storage/test/emqx_bridge_azure_blob_storage_SUITE.erl
  58. 1 1
      apps/emqx_bridge_azure_event_hub/rebar.config
  59. 1 1
      apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src
  60. 2 0
      apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl
  61. 4 1
      apps/emqx_bridge_cassandra/test/emqx_bridge_cassandra_SUITE.erl
  62. 1 1
      apps/emqx_bridge_confluent/rebar.config
  63. 1 1
      apps/emqx_bridge_confluent/src/emqx_bridge_confluent.app.src
  64. 2 0
      apps/emqx_bridge_confluent/src/emqx_bridge_confluent_producer.erl
  65. 10 18
      apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl
  66. 0 1
      apps/emqx_bridge_http/src/emqx_bridge_http_connector_info.erl
  67. 144 55
      apps/emqx_bridge_http/test/emqx_bridge_http_v2_SUITE.erl
  68. 28 4
      apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb_connector.erl
  69. 2 0
      apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb_connector_info.erl
  70. 1 1
      apps/emqx_bridge_kafka/rebar.config
  71. 1 1
      apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src
  72. 14 2
      apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl
  73. 7 5
      apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl
  74. 3 10
      apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl
  75. 2 0
      apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl
  76. 1 1
      apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src
  77. 1 1
      apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector_schema.erl
  78. 112 103
      apps/emqx_bridge_snowflake/docs/dev-quick-ref.md
  79. 117 0
      apps/emqx_bridge_snowflake/docs/user-guide.md
  80. 13 1
      apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_action_schema.erl
  81. 47 16
      apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_connector.erl
  82. 15 2
      apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_connector_schema.erl
  83. 1 1
      apps/emqx_bridge_snowflake/test/emqx_bridge_snowflake_SUITE.erl
  84. 1 1
      apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src
  85. 3 5
      apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver_connector.erl
  86. 2 6
      apps/emqx_bridge_sqlserver/test/emqx_bridge_sqlserver_SUITE.erl
  87. 46 2
      apps/emqx_cluster_link/src/emqx_cluster_link_api.erl
  88. 11 0
      apps/emqx_cluster_link/src/emqx_cluster_link_metrics.erl
  89. 33 0
      apps/emqx_cluster_link/test/emqx_cluster_link_api_SUITE.erl
  90. 1 1
      apps/emqx_conf/src/emqx_conf.app.src
  91. 17 2
      apps/emqx_conf/src/emqx_conf_schema.erl
  92. 2 1
      apps/emqx_conf/src/emqx_conf_schema_inject.erl
  93. 29 0
      apps/emqx_conf/test/emqx_conf_schema_tests.erl
  94. 1 1
      apps/emqx_connector/mix.exs
  95. 4 0
      apps/emqx_connector/src/emqx_connector.erl
  96. 12 12
      apps/emqx_connector/src/emqx_connector_api.erl
  97. 6 4
      apps/emqx_connector/src/emqx_connector_resource.erl
  98. 2 0
      apps/emqx_connector/test/emqx_connector_api_SUITE.erl
  99. 244 121
      apps/emqx_dashboard/src/emqx_dashboard_monitor.erl
  100. 0 0
      apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl

+ 1 - 1
.ci/docker-compose-file/.env

@@ -15,7 +15,7 @@ HSTREAMDB_ZK_TAG=3.8.1
 DATALAYERS_TAG=v2.1.7
 
 MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
-SQLSERVER_TAG=2019-CU19-ubuntu-20.04
+SQLSERVER_TAG=2022-CU15-ubuntu-22.04
 
 
 # Password for the 'elastic' user (at least 6 characters)

+ 0 - 2
.ci/docker-compose-file/docker-compose-sqlserver.yaml

@@ -1,5 +1,3 @@
-version: '3.9'
-
 services:
   sql_server:
     container_name: sqlserver

+ 0 - 2
.ci/docker-compose-file/docker-compose-toxiproxy.yaml

@@ -1,5 +1,3 @@
-version: '3.9'
-
 services:
   toxiproxy:
     container_name: toxiproxy

+ 0 - 2
.ci/docker-compose-file/docker-compose.yaml

@@ -1,5 +1,3 @@
-version: '3.9'
-
 services:
   erlang:
     hostname: erlang.emqx.net

+ 5 - 5
.ci/docker-compose-file/odbc/odbcinst.ini

@@ -1,9 +1,9 @@
 [ms-sql]
-Description=Microsoft ODBC Driver 17 for SQL Server
-Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+Description=Microsoft ODBC Driver 18 for SQL Server
+Driver=/opt/microsoft/msodbcsql18/lib64/libmsodbcsql-18.4.so.1.1
 UsageCount=1
 
-[ODBC Driver 17 for SQL Server]
-Description=Microsoft ODBC Driver 17 for SQL Server
-Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+[ODBC Driver 18 for SQL Server]
+Description=Microsoft ODBC Driver 18 for SQL Server
+Driver=/opt/microsoft/msodbcsql18/lib64/libmsodbcsql-18.4.so.1.1
 UsageCount=1

+ 12 - 3
.ci/docker-compose-file/openldap/Dockerfile

@@ -1,8 +1,17 @@
-FROM docker.io/zmstone/openldap:2.5.16@sha256:a813922115a1d1f1b974399595921d1778fae22b3f1ee15dcfa8cfa89700dbc7
+FROM ghcr.io/emqx/openldap:1.0.0
 
+## entrypoint
+COPY .ci/docker-compose-file/openldap/entrypoint /usr/local/etc/openldap/entrypoint
+
+## conf && schema
 COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
-COPY apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
 COPY apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
+
+## data
+COPY apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
+COPY apps/emqx_ldap/test/data/emqx.groups.ldif /usr/local/etc/openldap/schema/emqx.groups.ldif
+
+## pem
 COPY .ci/docker-compose-file/certs/ca.crt /usr/local/etc/openldap/cacert.pem
 COPY .ci/docker-compose-file/certs/server.crt /usr/local/etc/openldap/cert.pem
 COPY .ci/docker-compose-file/certs/server.key /usr/local/etc/openldap/key.pem
@@ -14,6 +23,6 @@ WORKDIR /usr/local/etc/openldap
 
 EXPOSE 389 636
 
-ENTRYPOINT ["/usr/local/libexec/slapd", "-h", "ldap:/// ldaps:///", "-d", "3", "-f", "/usr/local/etc/openldap/slapd.conf"]
+ENTRYPOINT ["./entrypoint"]
 
 CMD []

+ 14 - 0
.ci/docker-compose-file/openldap/entrypoint

@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+
+/usr/local/libexec/slapd -h "ldap:/// ldaps:///" -f /usr/local/etc/openldap/slapd.conf
+
+if [ ! -f ADDED_GROUPS ]; then
+    ldapadd -x -D cn=root,dc=emqx,dc=io -w public -f /usr/local/etc/openldap/schema/emqx.groups.ldif
+    touch ADDED_GROUPS
+fi
+
+PID=$(pgrep -o slapd)
+tail -f --pid="$PID"
+

+ 2 - 1
.ci/docker-compose-file/openldap/slapd.conf

@@ -11,5 +11,6 @@ database mdb
 suffix "dc=emqx,dc=io"
 rootdn "cn=root,dc=emqx,dc=io"
 rootpw {SSHA}eoF7NhNrejVYYyGHqnt+MdKNBh4r1w3W
-
 directory       /usr/local/etc/openldap/data
+overlay memberof
+memberof-refint TRUE

+ 4 - 2
.github/workflows/_push-entrypoint.yaml

@@ -173,7 +173,7 @@ jobs:
     needs:
       - prepare
       - compile
-    runs-on: ubuntu-22.04
+    runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
     steps:
       - name: Create PR in emqx/emqx-i18n
         env:
@@ -186,7 +186,7 @@ jobs:
     needs:
       - prepare
       - compile
-    runs-on: ubuntu-22.04
+    runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
     steps:
       - name: Create PR in emqx/emqx-docs
         env:
@@ -195,8 +195,10 @@ jobs:
           gh --repo emqx/emqx-docs workflow run update-api-and-cfg-manual.yaml -f version=${GITHUB_REF_NAME##[v|e]} -f repository=${GITHUB_REPOSITORY} -f run_id=${GITHUB_RUN_ID}
 
   run_emqx_app_tests:
+    if: needs.prepare.outputs.release != 'true'
     needs:
       - init
+      - prepare
       - compile
     uses: ./.github/workflows/run_emqx_app_tests.yaml
     with:

+ 14 - 1
.github/workflows/build_and_push_docker_images.yaml

@@ -192,7 +192,7 @@ jobs:
 
       - name: export docker image
         run: |
-          docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $PROFILE-docker-$PKG_VSN.tar.gz
+          docker save "${_EMQX_DOCKER_IMAGE_TAG}" | gzip > $PROFILE-docker-$PKG_VSN.tar.gz
 
       - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
         with:
@@ -209,3 +209,16 @@ jobs:
           DOCKER_LOAD: false
         run: |
           ./build ${PROFILE} docker
+
+      - name: Build and publish docker image with Snowflake ODBC driver
+        if: (inputs.publish || github.repository_owner != 'emqx') && matrix.profile[0] == 'emqx-enterprise'
+        env:
+          DOCKER_PUSH: true
+          DOCKER_BUILD_NOCACHE: false
+          DOCKER_PLATFORMS: linux/amd64,linux/arm64
+          DOCKER_LOAD: false
+          EMQX_DOCKERFILE: deploy/docker/Dockerfile.sfodbc
+        run: |
+          export BUILD_FROM="${_EMQX_DOCKER_IMAGE_TAG}"
+          export EMQX_IMAGE_TAG="${_EMQX_DOCKER_IMAGE_TAG##docker.io/}-sf"
+          ./build ${PROFILE} docker

+ 1 - 1
.github/workflows/run_emqx_app_tests.yaml

@@ -55,7 +55,7 @@ jobs:
           exit 0
         else
           echo 'skip=false' | tee -a $GITHUB_OUTPUT
-          echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
+          echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_4"},{"type": "2_4"},{"type": "3_4"},{"type": "4_4"}]' | tee -a $GITHUB_OUTPUT
         fi
 
   run_emqx_app_tests:

+ 5 - 1
Makefile

@@ -11,7 +11,7 @@ include env.sh
 # Dashboard version
 # from https://github.com/emqx/emqx-dashboard5
 export EMQX_DASHBOARD_VERSION ?= v1.10.0
-export EMQX_EE_DASHBOARD_VERSION ?= e1.8.0
+export EMQX_EE_DASHBOARD_VERSION ?= e1.8.1-beta.5
 
 export EMQX_RELUP ?= true
 export EMQX_REL_FORM ?= tgz
@@ -332,6 +332,10 @@ fmt: $(REBAR)
 	@$(SCRIPTS)/erlfmt -w 'bin/nodetool'
 	@mix format
 
+.PHONY: fmt-diff
+fmt-diff:
+	@env ERLFMT_WRITE=true ./scripts/git-hook-pre-commit.sh
+
 .PHONY: clean-test-cluster-config
 clean-test-cluster-config:
 	@rm -f apps/emqx_conf/data/configs/cluster.hocon || true

+ 1 - 1
apps/emqx/include/emqx.hrl

@@ -103,7 +103,7 @@
     by :: binary(),
     reason :: binary(),
     at :: integer(),
-    until :: integer()
+    until :: integer() | infinity
 }).
 
 %%--------------------------------------------------------------------

+ 2 - 2
apps/emqx/include/emqx_release.hrl

@@ -32,7 +32,7 @@
 %% `apps/emqx/src/bpapi/README.md'
 
 %% Opensource edition
--define(EMQX_RELEASE_CE, "5.8.0").
+-define(EMQX_RELEASE_CE, "5.8.1-alpha.1").
 
 %% Enterprise edition
--define(EMQX_RELEASE_EE, "5.8.0").
+-define(EMQX_RELEASE_EE, "5.8.1-alpha.1").

+ 2 - 2
apps/emqx/rebar.config

@@ -29,9 +29,9 @@
     {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
     {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
     {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.12.0"}}},
-    {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}},
+    {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.6"}}},
     {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.4.0"}}},
-    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.43.3"}}},
+    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.43.4"}}},
     {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
     {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
     {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},

+ 2 - 1
apps/emqx/src/emqx.erl

@@ -217,8 +217,9 @@ update_config(KeyPath, UpdateReq, Opts) ->
 ) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 update_config([RootName | _] = KeyPath, UpdateReq, Opts, ClusterRpcOpts) ->
+    Mod = emqx_config:get_schema_mod(RootName),
     emqx_config_handler:update_config(
-        emqx_config:get_schema_mod(RootName),
+        Mod,
         KeyPath,
         {{update, UpdateReq}, Opts},
         ClusterRpcOpts

+ 21 - 15
apps/emqx/src/emqx_banned.erl

@@ -40,6 +40,7 @@
     info/1,
     format/1,
     parse/1,
+    parse_who/1,
     clear/0,
     who/2,
     tables/0
@@ -68,10 +69,6 @@
 -define(BANNED_INDIVIDUAL_TAB, ?MODULE).
 -define(BANNED_RULE_TAB, emqx_banned_rules).
 
-%% The default expiration time should be infinite
-%% but for compatibility, a large number (1 years) is used here to represent the 'infinite'
--define(EXPIRATION_TIME, 31536000).
-
 -ifdef(TEST).
 -compile(export_all).
 -compile(nowarn_export_all).
@@ -149,7 +146,7 @@ parse(Params) ->
             By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
             Reason = maps:get(<<"reason">>, Params, <<"">>),
             At = maps:get(<<"at">>, Params, erlang:system_time(second)),
-            Until = maps:get(<<"until">>, Params, At + ?EXPIRATION_TIME),
+            Until = maps:get(<<"until">>, Params, infinity),
             case Until > erlang:system_time(second) of
                 true ->
                     {ok, #banned{
@@ -341,19 +338,26 @@ parse_stream([], Ok, Error) ->
     {ok, Ok}.
 
 normalize_parse_item(#{<<"as">> := As} = Item) ->
-    ParseTime = fun(Name, Input) ->
-        maybe
-            #{Name := Time} ?= Input,
-            {ok, Epoch} ?= emqx_utils_calendar:to_epoch_second(emqx_utils_conv:str(Time)),
-            {ok, Input#{Name := Epoch}}
-        else
-            {error, _} = Error ->
-                Error;
-            NoTime when is_map(NoTime) ->
-                {ok, NoTime}
+    ToSecond = fun(Name, Time, Input) ->
+        case emqx_utils_calendar:to_epoch_second(emqx_utils_conv:str(Time)) of
+            {ok, Epoch} ->
+                {ok, Input#{Name := Epoch}};
+            Error ->
+                Error
         end
     end,
 
+    ParseTime = fun
+        (<<"at">>, #{<<"at">> := Time} = Input) ->
+            ToSecond(<<"at">>, Time, Input);
+        (<<"until">>, #{<<"until">> := <<"infinity">>} = Input) ->
+            {ok, Input#{<<"until">> := infinity}};
+        (<<"until">>, #{<<"until">> := Time} = Input) ->
+            ToSecond(<<"until">>, Time, Input);
+        (_, Input) ->
+            {ok, Input}
+    end,
+
     maybe
         {ok, Type} ?= emqx_utils:safe_to_existing_atom(As),
         {ok, Item1} ?= ParseTime(<<"at">>, Item#{<<"as">> := Type}),
@@ -468,6 +472,8 @@ format_who({AsRE, {_RE, REOriginal}}) when AsRE =:= clientid_re orelse AsRE =:=
 format_who({As, Who}) when As =:= clientid orelse As =:= username ->
     {As, Who}.
 
+to_rfc3339(infinity) ->
+    infinity;
 to_rfc3339(Timestamp) ->
     emqx_utils_calendar:epoch_to_rfc3339(Timestamp, second).
 

+ 1 - 2
apps/emqx/src/emqx_broker.erl

@@ -25,7 +25,7 @@
 -include("types.hrl").
 -include("emqx_mqtt.hrl").
 
--export([start_link/2]).
+-export([start_link/2, create_tabs/0]).
 
 %% PubSub
 -export([
@@ -104,7 +104,6 @@
 
 -spec start_link(atom(), pos_integer()) -> startlink_ret().
 start_link(Pool, Id) ->
-    ok = create_tabs(),
     gen_server:start_link(
         {local, emqx_utils:proc_name(?BROKER, Id)},
         ?MODULE,

+ 2 - 1
apps/emqx/src/emqx_broker_sup.erl

@@ -32,8 +32,9 @@ start_link() ->
 
 init([]) ->
     %% Broker pool
+    ok = emqx_broker:create_tabs(),
     PoolSize = emqx:get_config([node, broker_pool_size], emqx_vm:schedulers() * 2),
-    BrokerPool = emqx_pool_sup:spec(broker_pool_sup, [
+    BrokerPool = emqx_pool_sup:spec(broker_pool_sup, permanent, [
         broker_pool,
         hash,
         PoolSize,

+ 3 - 3
apps/emqx/src/emqx_config_handler.erl

@@ -761,7 +761,7 @@ merge_to_override_config(RawConf, Opts) ->
 upgrade_conf(Conf) ->
     ConfigLoader = emqx_app:get_config_loader(),
     %% ensure module loaded
-    _ = ConfigLoader:module_info(),
+    ok = emqx_utils:interactive_load(ConfigLoader),
     case erlang:function_exported(ConfigLoader, schema_module, 0) of
         true ->
             try_upgrade_conf(apply(ConfigLoader, schema_module, []), Conf);
@@ -849,7 +849,7 @@ remove_empty_leaf(KeyPath, Handlers) ->
     end.
 
 assert_callback_function(Mod) ->
-    _ = apply(Mod, module_info, []),
+    emqx_utils:interactive_load(Mod),
     case
         erlang:function_exported(Mod, pre_config_update, 3) orelse
             erlang:function_exported(Mod, post_config_update, 5) orelse
@@ -902,7 +902,7 @@ get_function_arity(_Module, _Callback, []) ->
     false;
 get_function_arity(Module, Callback, [Arity | Opts]) ->
     %% ensure module is loaded
-    Module = Module:module_info(module),
+    ok = emqx_utils:interactive_load(Module),
     case erlang:function_exported(Module, Callback, Arity) of
         true -> Arity;
         false -> get_function_arity(Module, Callback, Opts)

+ 8 - 36
apps/emqx/src/emqx_frame.erl

@@ -1258,43 +1258,15 @@ validate_utf8(Bin) ->
     end.
 
 %% Is the utf8 string respecting UTF-8 characters defined by MQTT Spec?
-%% i.e. contains invalid UTF-8 char or control char
+%% i.e. does the string contains control characters?
+%% Note: this is under the assumption that the string is already validated by `unicode:characters_to_binary/1`
+%% hence there is no need to validate utf8 byte sequence integrity
 validate_mqtt_utf8_char(<<>>) ->
     true;
-%% ==== 1-Byte UTF-8 invalid: [[U+0000 .. U+001F] && [U+007F]]
-validate_mqtt_utf8_char(<<B1, Bs/binary>>) when
-    B1 >= 16#20, B1 =< 16#7E
+validate_mqtt_utf8_char(<<H/utf8, _Rest/binary>>) when
+    H >= 16#00, H =< 16#1F;
+    H >= 16#7F, H =< 16#9F
 ->
-    validate_mqtt_utf8_char(Bs);
-validate_mqtt_utf8_char(<<B1, _Bs/binary>>) when
-    B1 >= 16#00, B1 =< 16#1F;
-    B1 =:= 16#7F
-->
-    %% [U+0000 .. U+001F] && [U+007F]
-    false;
-%% ==== 2-Bytes UTF-8 invalid: [U+0080 .. U+009F]
-validate_mqtt_utf8_char(<<B1, B2, Bs/binary>>) when
-    B1 =:= 16#C2;
-    B2 >= 16#A0, B2 =< 16#BF;
-    B1 > 16#C3, B1 =< 16#DE;
-    B2 >= 16#80, B2 =< 16#BF
-->
-    validate_mqtt_utf8_char(Bs);
-validate_mqtt_utf8_char(<<16#C2, B2, _Bs/binary>>) when
-    B2 >= 16#80, B2 =< 16#9F
-->
-    %% [U+0080 .. U+009F]
     false;
-%% ==== 3-Bytes UTF-8 invalid: [U+D800 .. U+DFFF]
-validate_mqtt_utf8_char(<<B1, _B2, _B3, Bs/binary>>) when
-    B1 >= 16#E0, B1 =< 16#EE;
-    B1 =:= 16#EF
-->
-    validate_mqtt_utf8_char(Bs);
-validate_mqtt_utf8_char(<<16#ED, _B2, _B3, _Bs/binary>>) ->
-    false;
-%% ==== 4-Bytes UTF-8
-validate_mqtt_utf8_char(<<B1, _B2, _B3, _B4, Bs/binary>>) when
-    B1 =:= 16#0F
-->
-    validate_mqtt_utf8_char(Bs).
+validate_mqtt_utf8_char(<<_H/utf8, Rest/binary>>) ->
+    validate_mqtt_utf8_char(Rest).

+ 6 - 2
apps/emqx/src/emqx_pool_sup.erl

@@ -20,7 +20,7 @@
 
 -include("types.hrl").
 
--export([spec/1, spec/2]).
+-export([spec/1, spec/2, spec/3]).
 
 -export([
     start_link/0,
@@ -39,10 +39,14 @@ spec(Args) ->
 
 -spec spec(any(), list()) -> supervisor:child_spec().
 spec(ChildId, Args) ->
+    spec(ChildId, transient, Args).
+
+-spec spec(any(), transient | permanent | temporary, list()) -> supervisor:child_spec().
+spec(ChildId, Restart, Args) ->
     #{
         id => ChildId,
         start => {?MODULE, start_link, Args},
-        restart => transient,
+        restart => Restart,
         shutdown => infinity,
         type => supervisor,
         modules => [?MODULE]

+ 18 - 10
apps/emqx/src/emqx_schema.erl

@@ -2255,16 +2255,6 @@ common_ssl_opts_schema(Defaults, Type) ->
             )},
         {"versions", tls_versions_schema(Collection)},
         {"ciphers", ciphers_schema(D(ciphers))},
-        {"user_lookup_fun",
-            sc(
-                typerefl:alias("string", any()),
-                #{
-                    default => <<"emqx_tls_psk:lookup">>,
-                    converter => fun ?MODULE:user_lookup_fun_tr/2,
-                    importance => ?IMPORTANCE_HIDDEN,
-                    desc => ?DESC(common_ssl_opts_schema_user_lookup_fun)
-                }
-            )},
         {"secure_renegotiate",
             sc(
                 boolean(),
@@ -2342,6 +2332,16 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
                         default => Df(handshake_timeout, <<"15s">>),
                         desc => ?DESC(server_ssl_opts_schema_handshake_timeout)
                     }
+                )},
+            {"user_lookup_fun",
+                sc(
+                    typerefl:alias("string", any()),
+                    #{
+                        default => <<"emqx_tls_psk:lookup">>,
+                        converter => fun ?MODULE:user_lookup_fun_tr/2,
+                        importance => ?IMPORTANCE_HIDDEN,
+                        desc => ?DESC(common_ssl_opts_schema_user_lookup_fun)
+                    }
                 )}
         ] ++
         [
@@ -2459,6 +2459,14 @@ client_ssl_opts_schema(Defaults) ->
                         validator => fun emqx_schema:non_empty_string/1,
                         desc => ?DESC(client_ssl_opts_schema_server_name_indication)
                     }
+                )},
+            {"user_lookup_fun",
+                sc(
+                    string(),
+                    #{
+                        deprecated => {since, "5.8.1"},
+                        importance => ?IMPORTANCE_HIDDEN
+                    }
                 )}
         ].
 

+ 51 - 1
apps/emqx/src/emqx_session.erl

@@ -184,16 +184,66 @@
 
 -callback create(clientinfo(), conninfo(), emqx_maybe:t(message()), conf()) ->
     t().
+
 -callback open(clientinfo(), conninfo(), emqx_maybe:t(message()), conf()) ->
     {_IsPresent :: true, t(), _ReplayContext} | false.
+
 -callback destroy(t() | clientinfo()) -> ok.
+
 -callback clear_will_message(t()) -> t().
+
 -callback publish_will_message_now(t(), message()) -> t().
+
 -callback handle_timeout(clientinfo(), common_timer_name() | custom_timer_name(), t()) ->
     {ok, replies(), t()}
     | {ok, replies(), timeout(), t()}.
+
 -callback handle_info(term(), t(), clientinfo()) -> t().
 
+-callback get_subscription(emqx_types:topic(), t()) ->
+    emqx_types:subopts() | undefined.
+
+-callback subscribe(emqx_types:topic(), emqx_types:subopts(), t()) ->
+    {ok, t()} | {error, emqx_types:reason_code()}.
+
+-callback unsubscribe(emqx_types:topic(), t()) ->
+    {ok, t(), emqx_types:subopts()}
+    | {error, emqx_types:reason_code()}.
+
+-callback publish(emqx_types:packet_id(), emqx_types:message(), t()) ->
+    {ok, emqx_types:publish_result(), t()}
+    | {error, emqx_types:reason_code()}.
+
+-callback puback(clientinfo(), emqx_types:packet_id(), t()) ->
+    {ok, emqx_types:message(), replies(), t()}
+    | {error, emqx_types:reason_code()}.
+
+-callback pubrec(emqx_types:packet_id(), t()) ->
+    {ok, emqx_types:message(), t()}
+    | {error, emqx_types:reason_code()}.
+
+-callback pubrel(emqx_types:packet_id(), t()) ->
+    {ok, t()}
+    | {error, emqx_types:reason_code()}.
+
+-callback pubcomp(clientinfo(), emqx_types:packet_id(), t()) ->
+    {ok, replies(), t()}
+    | {error, emqx_types:reason_code()}.
+
+-callback replay(clientinfo(), [emqx_types:message()], t()) ->
+    {ok, replies(), t()}.
+
+-callback deliver(clientinfo(), [emqx_types:deliver()], t()) ->
+    {ok, replies(), t()}.
+
+-callback info(atom(), t()) -> term().
+
+-callback stats(t()) -> emqx_types:stats().
+
+-callback disconnect(t(), conninfo()) -> {idle | shutdown, t()}.
+
+-callback terminate(Reason :: term(), t()) -> ok.
+
 %%--------------------------------------------------------------------
 %% Create a Session
 %%--------------------------------------------------------------------
@@ -397,7 +447,7 @@ pubcomp(ClientInfo, PacketId, Session) ->
 
 %%--------------------------------------------------------------------
 
--spec replay(clientinfo(), _ReplayContext, t()) ->
+-spec replay(clientinfo(), [emqx_types:message()], t()) ->
     {ok, replies(), t()}.
 replay(ClientInfo, ReplayContext, Session) ->
     ?IMPL(Session):replay(ClientInfo, ReplayContext, Session).

+ 13 - 0
apps/emqx/test/emqx_banned_SUITE.erl

@@ -293,6 +293,19 @@ t_error_bootstrap_file(_) ->
     ?assertMatch(Keys, [element(2, Data) || Data <- get_banned_list()]),
     ok.
 
+t_until_expiry(_) ->
+    Who = #{<<"as">> => clientid, <<"who">> => <<"t_until_expiry">>},
+
+    {ok, Banned} = emqx_banned:parse(Who),
+    {ok, _} = emqx_banned:create(Banned),
+
+    [Data] = emqx_banned:look_up(Who),
+    ?assertEqual(Banned, Data),
+    ?assertMatch(#{until := infinity}, emqx_banned:format(Data)),
+
+    emqx_banned:clear(),
+    ok.
+
 receive_messages(Count) ->
     receive_messages(Count, []).
 receive_messages(0, Msgs) ->

+ 2 - 2
apps/emqx_auth/etc/acl.conf

@@ -113,9 +113,9 @@
 %%   topics prefixed by their own client ID.
 %%
 %%   Supported placeholders are:
-%%   - `${cn}`: TLS certificate common name.
-%%   - `${clientid}`: The client ID.
 %%   - `${username}`: The username.
+%%   - `${clientid}`: The client ID.
+%%   - `${cert_common_name}`: TLS certificate common name.
 %%   - `${client_attrs.NAME}`: A client attribute named `NAME`, which can be initialized by
 %%     `mqtt.client_attrs_init` config or extended by certain authentication backends.
 %%   NOTE: Placeholder is not rendered as empty string if the referencing value is not

+ 1 - 0
apps/emqx_auth/include/emqx_authn.hrl

@@ -18,6 +18,7 @@
 -define(EMQX_AUTHN_HRL, true).
 
 -include("emqx_authn_chains.hrl").
+-include_lib("emqx/include/emqx_placeholder.hrl").
 
 -define(AUTHN, emqx_authn_chains).
 

+ 5 - 1
apps/emqx_auth/src/emqx_auth_utils.erl

@@ -29,7 +29,8 @@
     render_deep_for_raw/2,
     render_str/2,
     render_urlencoded_str/2,
-    render_sql_params/2
+    render_sql_params/2,
+    render_strict/2
 ]).
 
 %% URL parsing
@@ -216,6 +217,9 @@ render_var(?VAR_PASSWORD, Value) ->
 render_var(_Name, Value) ->
     Value.
 
+render_strict(Topic, ClientInfo) ->
+    emqx_template:render_strict(Topic, rename_client_info_vars(ClientInfo)).
+
 rename_client_info_vars(ClientInfo) ->
     Renames = [
         {cn, cert_common_name},

+ 5 - 8
apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl

@@ -137,19 +137,16 @@ select_union_member(_Kind, Value, _Mods) ->
     throw(#{reason => "not_a_struct", value => Value}).
 
 mod_select_union_member(Kind, Value, Mod) ->
-    emqx_utils:call_first_defined([
-        {Mod, select_union_member, [Kind, Value]},
-        {Mod, select_union_member, [Value]}
-    ]).
+    Args1 = [Kind, Value],
+    Args2 = [Value],
+    ArgsL = [Args1, Args2],
+    emqx_utils:call_first_defined(Mod, select_union_member, ArgsL).
 
 config_refs(Kind, Mods) ->
     lists:append([mod_refs(Kind, Mod) || Mod <- Mods]).
 
 mod_refs(Kind, Mod) ->
-    emqx_utils:call_first_defined([
-        {Mod, refs, [Kind]},
-        {Mod, refs, []}
-    ]).
+    emqx_utils:call_first_defined(Mod, refs, [[Kind], []]).
 
 root_type() ->
     hoconsc:array(authenticator_type()).

+ 1 - 0
apps/emqx_auth/src/emqx_authz/emqx_authz.erl

@@ -713,6 +713,7 @@ format_for_api(Source) ->
 
 maybe_write_source_files(Source) ->
     Module = authz_module(type(Source)),
+    ok = emqx_utils:interactive_load(Module),
     case erlang:function_exported(Module, write_files, 1) of
         true ->
             Module:write_files(Source);

+ 7 - 2
apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl

@@ -122,7 +122,12 @@
 ]).
 
 -define(IS_PERMISSION(Permission), (Permission =:= allow orelse Permission =:= deny)).
--define(ALLOWED_VARS, [?VAR_USERNAME, ?VAR_CLIENTID, ?VAR_NS_CLIENT_ATTRS]).
+-define(ALLOWED_VARS, [
+    ?VAR_USERNAME,
+    ?VAR_CLIENTID,
+    ?VAR_CERT_CN_NAME,
+    ?VAR_NS_CLIENT_ATTRS
+]).
 
 -spec compile(permission_resolution_precompile(), who_precompile(), action_precompile(), [
     topic_precompile()
@@ -402,7 +407,7 @@ match_topic(Topic, TopicFilter) ->
 
 render_topic(Topic, ClientInfo) ->
     try
-        bin(emqx_template:render_strict(Topic, ClientInfo))
+        bin(emqx_auth_utils:render_strict(Topic, ClientInfo))
     catch
         error:Reason ->
             ?SLOG(debug, #{

+ 23 - 0
apps/emqx_auth/test/emqx_authz/emqx_authz_file_SUITE.erl

@@ -98,6 +98,29 @@ t_client_attrs(_Config) ->
     ),
     ok.
 
+t_cert_common_name(_Config) ->
+    ClientInfo0 = emqx_authz_test_lib:base_client_info(),
+    ClientInfo = ClientInfo0#{cn => <<"mycn">>},
+    ok = setup_config(?RAW_SOURCE#{
+        <<"rules">> => <<"{allow, all, all, [\"t/${cert_common_name}/#\"]}.">>
+    }),
+
+    ?assertEqual(
+        allow,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t/mycn/1">>)
+    ),
+
+    ?assertEqual(
+        allow,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_SUBSCRIBE, <<"t/mycn/#">>)
+    ),
+
+    ?assertEqual(
+        deny,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_SUBSCRIBE, <<"t/othercn/1">>)
+    ),
+    ok.
+
 t_rich_actions(_Config) ->
     ClientInfo = emqx_authz_test_lib:base_client_info(),
 

+ 94 - 0
apps/emqx_auth_cinfo/BSL.txt

@@ -0,0 +1,94 @@
+Business Source License 1.1
+
+Licensor:             Hangzhou EMQ Technologies Co., Ltd.
+Licensed Work:        EMQX Enterprise Edition
+                      The Licensed Work is (c) 2023
+                      Hangzhou EMQ Technologies Co., Ltd.
+Additional Use Grant: Students and educators are granted right to copy,
+                      modify, and create derivative work for research
+                      or education.
+Change Date:          2028-01-26
+Change License:       Apache License, Version 2.0
+
+For information about alternative licensing arrangements for the Software,
+please contact Licensor: https://www.emqx.com/en/contact
+
+Notice
+
+The Business Source License (this document, or the “License”) is not an Open
+Source license. However, the Licensed Work will eventually be made available
+under an Open Source License, as stated in this License.
+
+License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
+“Business Source License” is a trademark of MariaDB Corporation Ab.
+
+-----------------------------------------------------------------------------
+
+Business Source License 1.1
+
+Terms
+
+The Licensor hereby grants you the right to copy, modify, create derivative
+works, redistribute, and make non-production use of the Licensed Work. The
+Licensor may make an Additional Use Grant, above, permitting limited
+production use.
+
+Effective on the Change Date, or the fourth anniversary of the first publicly
+available distribution of a specific version of the Licensed Work under this
+License, whichever comes first, the Licensor hereby grants you rights under
+the terms of the Change License, and the rights granted in the paragraph
+above terminate.
+
+If your use of the Licensed Work does not comply with the requirements
+currently in effect as described in this License, you must purchase a
+commercial license from the Licensor, its affiliated entities, or authorized
+resellers, or you must refrain from using the Licensed Work.
+
+All copies of the original and modified Licensed Work, and derivative works
+of the Licensed Work, are subject to this License. This License applies
+separately for each version of the Licensed Work and the Change Date may vary
+for each version of the Licensed Work released by Licensor.
+
+You must conspicuously display this License on each original or modified copy
+of the Licensed Work. If you receive the Licensed Work in original or
+modified form from a third party, the terms and conditions set forth in this
+License apply to your use of that work.
+
+Any use of the Licensed Work in violation of this License will automatically
+terminate your rights under this License for the current and all other
+versions of the Licensed Work.
+
+This License does not grant you any right in any trademark or logo of
+Licensor or its affiliates (provided that you may use a trademark or logo of
+Licensor as expressly required by this License).
+
+TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
+AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
+EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
+TITLE.
+
+MariaDB hereby grants you permission to use this License’s text to license
+your works, and to refer to it using the trademark “Business Source License”,
+as long as you comply with the Covenants of Licensor below.
+
+Covenants of Licensor
+
+In consideration of the right to use this License’s text and the “Business
+Source License” name and trademark, Licensor covenants to MariaDB, and to all
+other recipients of the licensed work to be provided by Licensor:
+
+1. To specify as the Change License the GPL Version 2.0 or any later version,
+   or a license that is compatible with GPL Version 2.0 or a later version,
+   where “compatible” means that software provided under the Change License can
+   be included in a program with software provided under GPL Version 2.0 or a
+   later version. Licensor may specify additional Change Licenses without
+   limitation.
+
+2. To either: (a) specify an additional grant of rights to use that does not
+   impose any additional restriction on the right granted in this License, as
+   the Additional Use Grant; or (b) insert the text “None”.
+
+3. To specify a Change Date.
+
+4. Not to modify this License in any other way.

+ 49 - 0
apps/emqx_auth_cinfo/README.md

@@ -0,0 +1,49 @@
+# Authenticate clients with connection information
+
+This application implements an extended authentication mechanism for the EMQX Enterprise edition.
+
+Client-info (of type `cinfo`) authentication is a lightweight authentication mechanism which checks client properties and attributes against user-defined rules.
+The rules use Variform expressions to define match conditions and the authentication result to apply when a match is found.
+For example, to quickly fence off clients without a username, the match condition can be `str_eq(username, '')` associated with the result `deny`.
+
+An example of the new authenticator configuration is shown below.
+
+```
+authentication = [
+  {
+    mechanism = cinfo
+    checks = [
+      # allow clients with username starts with 'super-'
+      {
+        is_match = "regex_match(username, '^super-.+$')"
+        result = allow
+      },
+      # deny clients with an empty username and a client ID starting with 'v1-'
+      {
+        # when is_match is an array, it yields 'true' if all individual checks yield 'true'
+        is_match = ["str_eq(username, '')", "str_eq(nth(1,tokens(clientid,'-')), 'v1')"]
+        result = deny
+      }
+      # if all checks are exhausted without an 'allow' or a 'deny' result, continue to the next authentication
+    ]
+  },
+  # ... more authentications ...
+  # ...
+  # if all authenticators are exhausted without an 'allow' or a 'deny' result, the client is not rejected
+]
+```
+
+More match expression examples:
+
+- TLS certificate common name is the same as username: `str_eq(cert_common_name, username)`
+- Password is the `sha1` hash of environment variable `EMQXVAR_SECRET` concatenated to client ID: `str_eq(password, hash(sha1, concat([clientid, getenv('SECRET')])))`
+- Client attribute `client_attrs.group` is not 'g0': `str_neq(client_attrs.group, 'g0')`
+- Client ID starts with zone name: `regex_match(clientid, concat(['^', zone, '.+$']))`
+
+# Contributing
+
+Please see our [contributing.md](../../CONTRIBUTING.md).
+
+# License
+
+EMQ Business Source License 1.1, refer to [LICENSE](BSL.txt).

+ 12 - 0
apps/emqx_auth_cinfo/include/emqx_auth_cinfo.hrl

@@ -0,0 +1,12 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-ifndef(EMQX_AUTH_CINFO_HRL).
+-define(EMQX_AUTH_CINFO_HRL, true).
+
+-define(AUTHN_MECHANISM, cinfo).
+-define(AUTHN_MECHANISM_BIN, <<"cinfo">>).
+-define(AUTHN_TYPE, ?AUTHN_MECHANISM).
+
+-endif.

+ 33 - 0
apps/emqx_auth_cinfo/mix.exs

@@ -0,0 +1,33 @@
+defmodule EMQXAuthCinfo.MixProject do
+  use Mix.Project
+  alias EMQXUmbrella.MixProject, as: UMP
+
+  def project do
+    [
+      app: :emqx_auth_cinfo,
+      version: "0.1.0",
+      build_path: "../../_build",
+      # config_path: "../../config/config.exs",
+      erlc_options: UMP.erlc_options(),
+      erlc_paths: UMP.erlc_paths(),
+      deps_path: "../../deps",
+      lockfile: "../../mix.lock",
+      elixir: "~> 1.14",
+      start_permanent: Mix.env() == :prod,
+      deps: deps()
+    ]
+  end
+
+  # Run "mix help compile.app" to learn about applications
+  def application do
+    [extra_applications: UMP.extra_applications(), mod: {:emqx_auth_cinfo_app, []}]
+  end
+
+  def deps() do
+    [
+      {:emqx, in_umbrella: true},
+      {:emqx_auth, in_umbrella: true},
+      UMP.common_dep(:jose),
+    ]
+  end
+end

+ 7 - 0
apps/emqx_auth_cinfo/rebar.config

@@ -0,0 +1,7 @@
+%% -*- mode: erlang -*-
+
+{deps, [
+    {emqx, {path, "../emqx"}},
+    {emqx_utils, {path, "../emqx_utils"}},
+    {emqx_auth, {path, "../emqx_auth"}}
+]}.

+ 16 - 0
apps/emqx_auth_cinfo/src/emqx_auth_cinfo.app.src

@@ -0,0 +1,16 @@
+%% -*- mode: erlang -*-
+{application, emqx_auth_cinfo, [
+    {description, "EMQX Client Information Authorization"},
+    {vsn, "0.1.0"},
+    {registered, []},
+    {mod, {emqx_auth_cinfo_app, []}},
+    {applications, [
+        kernel,
+        stdlib,
+        emqx,
+        emqx_auth
+    ]},
+    {env, []},
+    {modules, []},
+    {links, []}
+]}.

+ 20 - 0
apps/emqx_auth_cinfo/src/emqx_auth_cinfo_app.erl

@@ -0,0 +1,20 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_auth_cinfo_app).
+
+-include("emqx_auth_cinfo.hrl").
+
+-behaviour(application).
+
+-export([start/2, stop/1]).
+
+start(_StartType, _StartArgs) ->
+    {ok, Sup} = emqx_auth_cinfo_sup:start_link(),
+    ok = emqx_authn:register_provider(?AUTHN_TYPE, emqx_authn_cinfo),
+    {ok, Sup}.
+
+stop(_State) ->
+    ok = emqx_authn:deregister_provider(?AUTHN_TYPE),
+    ok.

+ 25 - 0
apps/emqx_auth_cinfo/src/emqx_auth_cinfo_sup.erl

@@ -0,0 +1,25 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_auth_cinfo_sup).
+
+-behaviour(supervisor).
+
+-export([start_link/0]).
+
+-export([init/1]).
+
+-define(SERVER, ?MODULE).
+
+start_link() ->
+    supervisor:start_link({local, ?SERVER}, ?MODULE, []).
+
+init([]) ->
+    SupFlags = #{
+        strategy => one_for_all,
+        intensity => 0,
+        period => 1
+    },
+    ChildSpecs = [],
+    {ok, {SupFlags, ChildSpecs}}.

+ 137 - 0
apps/emqx_auth_cinfo/src/emqx_authn_cinfo.erl

@@ -0,0 +1,137 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_authn_cinfo).
+
+-include_lib("emqx_auth/include/emqx_authn.hrl").
+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("jose/include/jose_jwk.hrl").
+
+-export([
+    create/2,
+    update/2,
+    authenticate/2,
+    destroy/1
+]).
+
+create(AuthenticatorID, #{checks := Checks}) ->
+    case compile(AuthenticatorID, Checks) of
+        {ok, Compiled} ->
+            {ok, #{
+                id => AuthenticatorID,
+                checks => Compiled
+            }};
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+compile(ID, Checks) ->
+    try
+        {ok, compile_checks(Checks, [])}
+    catch
+        throw:Error ->
+            {error, Error#{authenticator => ID}}
+    end.
+
+compile_checks([], Acc) ->
+    lists:reverse(Acc);
+compile_checks([Check | Checks], Acc) ->
+    compile_checks(Checks, [compile_check(Check) | Acc]).
+
+compile_check(#{is_match := Expressions} = C) ->
+    Compiled = compile_exprs(Expressions),
+    %% is_match being non-empty is ensured by schema module
+    Compiled =:= [] andalso error(empty),
+    C#{is_match => Compiled}.
+
+compile_exprs(Expression) when is_binary(Expression) ->
+    [compile_expr(Expression)];
+compile_exprs(Expressions) when is_list(Expressions) ->
+    lists:map(fun compile_expr/1, Expressions).
+
+compile_expr(Expression) ->
+    %% Expression not empty string is ensured by schema
+    true = (<<"">> =/= Expression),
+    %% emqx_variform:compile(Expression) return 'ok' tuple is ensured by schema
+    {ok, Compiled} = emqx_variform:compile(Expression),
+    Compiled.
+
+update(#{enable := false}, State) ->
+    {ok, State};
+update(Config, #{id := ID}) ->
+    create(ID, Config).
+
+authenticate(#{auth_method := _}, _) ->
+    %% enhanced authentication is not supported by this provider
+    ignore;
+authenticate(Credential0, #{checks := Checks}) ->
+    Credential = add_credential_aliases(Credential0),
+    check(Checks, Credential).
+
+check([], _) ->
+    ignore;
+check([Check | Rest], Credential) ->
+    case do_check(Check, Credential) of
+        nomatch ->
+            check(Rest, Credential);
+        {match, ignore} ->
+            ignore;
+        {match, allow} ->
+            {ok, #{}};
+        {match, deny} ->
+            {error, bad_username_or_password}
+    end.
+
+do_check(#{is_match := CompiledExprs, result := Result}, Credential) ->
+    case is_match(CompiledExprs, Credential) of
+        true ->
+            {match, Result};
+        false ->
+            nomatch
+    end.
+
+is_match([], _Credential) ->
+    true;
+is_match([CompiledExpr | CompiledExprs], Credential) ->
+    case emqx_variform:render(CompiledExpr, Credential) of
+        {ok, <<"true">>} ->
+            is_match(CompiledExprs, Credential);
+        {ok, <<"false">>} ->
+            false;
+        {ok, Other} ->
+            ?SLOG(debug, "clientinfo_auth_expression_yield_non_boolean", #{
+                expr => emqx_variform:decompile(CompiledExpr),
+                yield => Other
+            }),
+            false;
+        {error, Reason} ->
+            {error, #{
+                cause => "clientinfo_auth_expression_evaluation_error",
+                error => Reason
+            }}
+    end.
+
+destroy(_) ->
+    ok.
+
+%% Add aliases for credential fields
+%% - cert_common_name for cn
+%% - cert_subject for dn
+add_credential_aliases(Credential) ->
+    Aliases = [
+        {cn, cert_common_name},
+        {dn, cert_subject}
+    ],
+    add_credential_aliases(Credential, Aliases).
+
+add_credential_aliases(Credential, []) ->
+    Credential;
+add_credential_aliases(Credential, [{Field, Alias} | Rest]) ->
+    case maps:find(Field, Credential) of
+        {ok, Value} ->
+            add_credential_aliases(Credential#{Alias => Value}, Rest);
+        error ->
+            add_credential_aliases(Credential, Rest)
+    end.

+ 93 - 0
apps/emqx_auth_cinfo/src/emqx_authn_cinfo_schema.erl

@@ -0,0 +1,93 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_authn_cinfo_schema).
+
+-behaviour(emqx_authn_schema).
+
+-export([
+    namespace/0,
+    fields/1,
+    desc/1,
+    refs/0,
+    select_union_member/1
+]).
+
+-include("emqx_auth_cinfo.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+-include_lib("typerefl/include/types.hrl").
+
+namespace() -> "authn".
+
+refs() ->
+    [
+        ?R_REF("cinfo")
+    ].
+
+select_union_member(#{<<"mechanism">> := ?AUTHN_MECHANISM_BIN}) ->
+    [?R_REF("cinfo")];
+select_union_member(_Value) ->
+    undefined.
+
+fields("cinfo") ->
+    [
+        {mechanism, emqx_authn_schema:mechanism(?AUTHN_MECHANISM)},
+        {checks,
+            hoconsc:mk(
+                hoconsc:array(?R_REF("cinfo_check")),
+                #{
+                    required => true,
+                    desc => ?DESC(checks),
+                    validator => fun validate_checks/1,
+                    importance => ?IMPORTANCE_HIGH
+                }
+            )}
+    ] ++ emqx_authn_schema:common_fields();
+fields("cinfo_check") ->
+    [
+        {is_match,
+            hoconsc:mk(
+                hoconsc:union([binary(), hoconsc:array(binary())]),
+                #{
+                    required => true,
+                    desc => ?DESC(is_match),
+                    importance => ?IMPORTANCE_HIGH,
+                    validator => fun validate_expressions/1
+                }
+            )},
+        {result,
+            hoconsc:mk(
+                hoconsc:enum([allow, deny, ignore]),
+                #{
+                    required => true,
+                    desc => ?DESC(result),
+                    importance => ?IMPORTANCE_HIGH
+                }
+            )}
+    ].
+
+desc("cinfo") ->
+    ?DESC("cinfo");
+desc("cinfo_check") ->
+    ?DESC("check").
+
+validate_checks([]) ->
+    throw("require_at_least_one_check");
+validate_checks(List) when is_list(List) ->
+    ok.
+
+validate_expressions(Expr) when is_binary(Expr) ->
+    validate_expression(Expr);
+validate_expressions(Exprs) when is_list(Exprs) ->
+    lists:foreach(fun validate_expression/1, Exprs).
+
+validate_expression(<<>>) ->
+    throw("should not be empty string");
+validate_expression(Expr) ->
+    case emqx_variform:compile(Expr) of
+        {ok, _} ->
+            ok;
+        {error, Reason} ->
+            throw(Reason)
+    end.

+ 170 - 0
apps/emqx_auth_cinfo/test/emqx_authn_cinfo_SUITE.erl

@@ -0,0 +1,170 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_authn_cinfo_SUITE).
+
+-compile(export_all).
+-compile(nowarn_export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-define(AUTHN_ID, <<"mechanism:cinfo">>).
+
+all() ->
+    emqx_common_test_helpers:all(?MODULE).
+
+init_per_suite(Config) ->
+    Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_auth, emqx_auth_cinfo], #{
+        work_dir => emqx_cth_suite:work_dir(Config)
+    }),
+    %% ensure module loaded
+    _ = emqx_variform_bif:module_info(),
+    [{apps, Apps} | Config].
+
+end_per_suite(Config) ->
+    ok = emqx_cth_suite:stop(?config(apps, Config)),
+    ok.
+
+end_per_testcase(_TestCase, _Config) ->
+    emqx_common_test_helpers:call_janitor(),
+    ok.
+
+%%------------------------------------------------------------------------------
+%% Tests
+%%------------------------------------------------------------------------------
+
+t_ignore_enhanced_auth(_) ->
+    ?assertEqual(ignore, emqx_authn_cinfo:authenticate(#{auth_method => <<"enhanced">>}, state)).
+
+t_username_equal_clientid(_) ->
+    Checks =
+        [
+            #{
+                is_match => <<"str_eq(username, '')">>,
+                result => deny
+            },
+            #{
+                is_match => <<"str_eq(username, clientid)">>,
+                result => allow
+            }
+        ],
+    with_checks(
+        Checks,
+        fun(State) ->
+            ?assertMatch(
+                {error, bad_username_or_password},
+                emqx_authn_cinfo:authenticate(#{username => <<>>}, State)
+            ),
+            ?assertMatch(
+                {ok, #{}},
+                emqx_authn_cinfo:authenticate(#{username => <<"a">>, clientid => <<"a">>}, State)
+            ),
+            ?assertMatch(
+                ignore,
+                emqx_authn_cinfo:authenticate(#{username => <<"a">>, clientid => <<"b">>}, State)
+            )
+        end
+    ).
+
+t_ignore_if_is_match_yield_false(_) ->
+    Checks =
+        [
+            #{
+                is_match => <<"str_eq(username, 'a')">>,
+                result => deny
+            }
+        ],
+    with_checks(
+        Checks,
+        fun(State) ->
+            ?assertEqual(ignore, emqx_authn_cinfo:authenticate(#{username => <<"b">>}, State))
+        end
+    ).
+
+t_ignore_if_is_match_yield_non_boolean(_) ->
+    Checks = [
+        #{
+            %% return 'no-identity' if both username and clientid are missing
+            %% this should lead to a 'false' result for 'is_match'
+            is_match => <<"coalesce(username,clientid,'no-identity')">>,
+            result => deny
+        }
+    ],
+    with_checks(
+        Checks,
+        fun(State) ->
+            ?assertEqual(ignore, emqx_authn_cinfo:authenticate(#{username => <<"b">>}, State))
+        end
+    ).
+
+t_multiple_is_match_expressions(_) ->
+    Checks = [
+        #{
+            %% use AND to connect multiple is_match expressions
+            %% this one means username is not empty, and clientid is 'super'
+            is_match => [
+                <<"str_neq('', username)">>, <<"str_eq(clientid, 'super')">>
+            ],
+            result => allow
+        }
+    ],
+    with_checks(
+        Checks,
+        fun(State) ->
+            ?assertEqual(
+                ignore,
+                emqx_authn_cinfo:authenticate(#{username => <<"">>, clientid => <<"super">>}, State)
+            ),
+            ?assertMatch(
+                {ok, #{}},
+                emqx_authn_cinfo:authenticate(
+                    #{username => <<"a">>, clientid => <<"super">>}, State
+                )
+            )
+        end
+    ).
+
+t_cert_fields_as_alias(_) ->
+    Checks = [
+        #{
+            is_match => [
+                <<"str_eq(clientid, coalesce(cert_common_name,''))">>
+            ],
+            result => allow
+        },
+        #{
+            is_match => <<"true">>,
+            result => deny
+        }
+    ],
+    with_checks(
+        Checks,
+        fun(State) ->
+            ?assertEqual(
+                {error, bad_username_or_password},
+                emqx_authn_cinfo:authenticate(#{username => <<"u">>, clientid => <<"c">>}, State)
+            ),
+            ?assertMatch(
+                {ok, #{}},
+                emqx_authn_cinfo:authenticate(#{cn => <<"CN1">>, clientid => <<"CN1">>}, State)
+            )
+        end
+    ).
+
+config(Checks) ->
+    #{
+        mechanism => cinfo,
+        checks => Checks
+    }.
+
+with_checks(Checks, F) ->
+    Config = config(Checks),
+    {ok, State} = emqx_authn_cinfo:create(?AUTHN_ID, Config),
+    try
+        F(State)
+    after
+        ?assertEqual(ok, emqx_authn_cinfo:destroy(State))
+    end,
+    ok.

+ 156 - 0
apps/emqx_auth_cinfo/test/emqx_authn_cinfo_int_SUITE.erl

@@ -0,0 +1,156 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+%% end-to-end integration test
+-module(emqx_authn_cinfo_int_SUITE).
+
+-compile(export_all).
+-compile(nowarn_export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("emqx_auth/include/emqx_authn.hrl").
+
+-define(AUTHN_ID, <<"mechanism:cinfo">>).
+
+all() ->
+    emqx_common_test_helpers:all(?MODULE).
+
+init_per_suite(Config) ->
+    Apps = emqx_cth_suite:start(
+        [
+            emqx_conf,
+            emqx,
+            emqx_auth,
+            %% to load schema
+            {emqx_auth_cinfo, #{start => false}},
+            emqx_management,
+            {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
+        ],
+        #{
+            work_dir => filename:join(?config(priv_dir, Config), ?MODULE)
+        }
+    ),
+    _ = emqx_common_test_http:create_default_app(),
+    ok = emqx_authn_chains:register_providers([{cinfo, emqx_authn_cinfo}]),
+    ?AUTHN:delete_chain(?GLOBAL),
+    {ok, Chains} = ?AUTHN:list_chains(),
+    ?assertEqual(length(Chains), 0),
+    [{apps, Apps} | Config].
+
+end_per_suite(Config) ->
+    ok = emqx_cth_suite:stop(?config(apps, Config)),
+    ok.
+
+init_per_testcase(_Case, Config) ->
+    emqx_authn_test_lib:delete_authenticators(
+        [?CONF_NS_ATOM],
+        ?GLOBAL
+    ),
+    Config.
+
+end_per_testcase(_TestCase, _Config) ->
+    ok.
+
+%%------------------------------------------------------------------------------
+%% Tests
+%%------------------------------------------------------------------------------
+
+t_create_ok(_Config) ->
+    {ok, Config} = hocon:binary(config(?FUNCTION_NAME)),
+    {ok, 200, _} = request(post, uri([?CONF_NS]), Config),
+    {ok, Client1} = emqtt:start_link([
+        {proto_ver, v5},
+        {username, <<"magic1">>},
+        {password, <<"ignore">>}
+    ]),
+    unlink(Client1),
+    {ok, Client2} = emqtt:start_link([
+        {proto_ver, v5},
+        {username, <<"magic2">>},
+        {password, <<"ignore">>}
+    ]),
+    unlink(Client2),
+    {ok, Client3} = emqtt:start_link([
+        {proto_ver, v5},
+        {username, <<"magic3">>},
+        {password, <<"ignore">>}
+    ]),
+    unlink(Client3),
+    ?assertMatch({ok, _}, emqtt:connect(Client1)),
+    ok = emqtt:disconnect(Client1),
+    ?assertMatch({error, {bad_username_or_password, #{}}}, emqtt:connect(Client2)),
+    ?assertMatch({error, {not_authorized, #{}}}, emqtt:connect(Client3)),
+    ok.
+
+t_empty_checks_is_not_allowed(_Config) ->
+    {ok, Config} = hocon:binary(config(?FUNCTION_NAME)),
+    ?assertMatch(
+        {ok, 400, _},
+        request(post, uri([?CONF_NS]), Config)
+    ),
+    ok.
+
+t_empty_is_match_not_allowed(_Config) ->
+    {ok, Config} = hocon:binary(config(?FUNCTION_NAME)),
+    ?assertMatch(
+        {ok, 400, _},
+        request(post, uri([?CONF_NS]), Config)
+    ),
+    ok.
+
+t_expression_compile_error(_Config) ->
+    {ok, Config} = hocon:binary(config(?FUNCTION_NAME)),
+    ?assertMatch(
+        {ok, 400, _},
+        request(post, uri([?CONF_NS]), Config)
+    ),
+    ok.
+
+%% erlfmt-ignore
+config(t_create_ok) ->
+    "{
+        mechanism = cinfo,
+        checks = [
+          {
+            is_match = \"str_eq(username,'magic1')\"
+            result = allow
+          },
+          {
+            is_match = \"str_eq(username, 'magic2')\"
+            result = deny
+          }
+        ]
+    }";
+config(t_empty_checks_is_not_allowed) ->
+    "{
+        mechanism = cinfo,
+        checks = []
+    }";
+config(t_empty_is_match_not_allowed) ->
+    "{
+        mechanism = cinfo,
+        checks = [
+          {
+            is_match = []
+            result = allow
+          }
+        ]
+    }";
+config(t_expression_compile_error) ->
+    "{
+        mechanism = cinfo,
+        checks = [
+          {
+            is_match = \"1\"
+            result = allow
+          }
+        ]
+    }".
+
+request(Method, Url, Body) ->
+    emqx_mgmt_api_test_util:request(Method, Url, Body).
+
+uri(Path) ->
+    emqx_mgmt_api_test_util:uri(Path).

+ 1 - 3
apps/emqx_auth_ext/src/emqx_auth_ext.app.src

@@ -1,6 +1,6 @@
 {application, emqx_auth_ext, [
     {description, "EMQX Extended Auth Library"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {applications, [
         kernel,
@@ -15,7 +15,5 @@
         emqx_auth_ext_tls_lib,
         emqx_auth_ext_tls_const_v1
     ]},
-
-    {licenses, ["Apache-2.0"]},
     {links, []}
 ]}.

+ 1 - 1
apps/emqx_auth_kerberos/rebar.config

@@ -3,5 +3,5 @@
 {deps, [
     {emqx, {path, "../emqx"}},
     {emqx_utils, {path, "../emqx_utils"}},
-    {sasl_auth, "2.3.2"}
+    {sasl_auth, "2.3.3"}
 ]}.

+ 28 - 2
apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl

@@ -336,16 +336,21 @@ deprecated_raw_ldap_auth_config() ->
     }.
 
 user_seeds() ->
-    New = fun(Username, Password, Result) ->
+    New4 = fun(Username, Password, Result, Params) ->
         #{
             credentials => #{
                 username => Username,
                 password => Password
             },
-            config_params => #{},
+            config_params => Params,
             result => Result
         }
     end,
+
+    New = fun(Username, Password, Result) ->
+        New4(Username, Password, Result, #{})
+    end,
+
     Valid =
         lists:map(
             fun(Idx) ->
@@ -368,6 +373,27 @@ user_seeds() ->
             <<"mqttuser0009 \\\\test\\\\">>,
             <<"mqttuser0009 \\\\test\\\\">>,
             {ok, #{is_superuser => true}}
+        ),
+        %% not in group
+        New4(
+            <<"mqttuser0002">>,
+            <<"mqttuser0002">>,
+            {error, not_authorized},
+            #{<<"filter">> => <<"(memberOf=cn=test,ou=Groups,dc=emqx,dc=io)">>}
+        ),
+        %% in group
+        New4(
+            <<"mqttuser0003">>,
+            <<"mqttuser0003">>,
+            {ok, #{is_superuser => false}},
+            #{<<"filter">> => <<"(memberOf=cn=test,ou=Groups,dc=emqx,dc=io)">>}
+        ),
+        %% non exists group
+        New4(
+            <<"mqttuser0003">>,
+            <<"mqttuser0003">>,
+            {error, not_authorized},
+            #{<<"filter">> => <<"(memberOf=cn=nonexists,ou=Groups,dc=emqx,dc=io)">>}
         )
         | Valid
     ].

+ 6 - 1
apps/emqx_bridge/src/emqx_bridge.erl

@@ -40,6 +40,7 @@
     unload/0,
     lookup/1,
     lookup/2,
+    is_exist_v1/2,
     get_metrics/2,
     create/3,
     disable_enable/3,
@@ -256,7 +257,8 @@ send_message(BridgeType, BridgeName, ResId, Message, QueryOpts0) ->
             QueryOpts = maps:merge(query_opts(Config), QueryOpts0),
             emqx_resource:query(ResId, {send_message, Message}, QueryOpts);
         #{enable := false} ->
-            {error, bridge_stopped}
+            %% race
+            {error, bridge_disabled}
     end.
 
 query_opts(Config) ->
@@ -326,6 +328,9 @@ lookup(Id) ->
     {Type, Name} = emqx_bridge_resource:parse_bridge_id(Id),
     lookup(Type, Name).
 
+is_exist_v1(Type, Name) ->
+    emqx_resource:is_exist(emqx_bridge_resource:resource_id(Type, Name)).
+
 lookup(Type, Name) ->
     case emqx_bridge_v2:is_bridge_v2_type(Type) of
         true ->

+ 1 - 1
apps/emqx_bridge/src/emqx_bridge_app.erl

@@ -49,7 +49,7 @@ stop(_State) ->
 
 -if(?EMQX_RELEASE_EDITION == ee).
 ensure_enterprise_schema_loaded() ->
-    _ = emqx_bridge_enterprise:module_info(),
+    emqx_utils:interactive_load(emqx_bridge_enterprise),
     ok.
 -else.
 ensure_enterprise_schema_loaded() ->

+ 1 - 1
apps/emqx_bridge/src/emqx_bridge_resource.erl

@@ -282,7 +282,7 @@ create_dry_run(Type0, Conf0) ->
     end.
 
 create_dry_run_bridge_v1(Type, Conf0) ->
-    TmpName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
+    TmpName = ?PROBE_ID_NEW(),
     TmpPath = emqx_utils:safe_filename(TmpName),
     %% Already type checked, no need to catch errors
     TypeBin = bin(Type),

+ 27 - 12
apps/emqx_bridge/src/emqx_bridge_v2.erl

@@ -44,6 +44,8 @@
     list/1,
     lookup/2,
     lookup/3,
+    lookup_raw_conf/3,
+    is_exist/3,
     create/3,
     create/4,
     %% The remove/2 function is only for internal use as it may create
@@ -56,7 +58,7 @@
     check_deps_and_remove/3,
     check_deps_and_remove/4
 ]).
--export([lookup_action/2, lookup_source/2]).
+-export([is_action_exist/2, is_source_exist/2]).
 
 %% Operations
 
@@ -234,11 +236,22 @@ unload_bridges(ConfRooKey) ->
 lookup(Type, Name) ->
     lookup(?ROOT_KEY_ACTIONS, Type, Name).
 
-lookup_action(Type, Name) ->
-    lookup(?ROOT_KEY_ACTIONS, Type, Name).
+is_action_exist(Type, Name) ->
+    is_exist(?ROOT_KEY_ACTIONS, Type, Name).
+
+is_source_exist(Type, Name) ->
+    is_exist(?ROOT_KEY_SOURCES, Type, Name).
+
+is_exist(ConfRootName, Type, Name) ->
+    {error, not_found} =/= lookup_raw_conf(ConfRootName, Type, Name).
 
-lookup_source(Type, Name) ->
-    lookup(?ROOT_KEY_SOURCES, Type, Name).
+lookup_raw_conf(ConfRootName, Type, Name) ->
+    case emqx:get_raw_config([ConfRootName, Type, Name], not_found) of
+        not_found ->
+            {error, not_found};
+        #{<<"connector">> := _} = RawConf ->
+            {ok, RawConf}
+    end.
 
 -spec lookup(root_cfg_key(), bridge_v2_type(), bridge_v2_name()) ->
     {ok, bridge_v2_info()} | {error, not_found}.
@@ -670,8 +683,9 @@ query(BridgeType, BridgeName, Message, QueryOpts0) ->
             Config = combine_connector_and_bridge_v2_config(BridgeType, BridgeName, Config0),
             do_query_with_enabled_config(BridgeType, BridgeName, Message, QueryOpts0, Config);
         #{enable := false} ->
-            {error, bridge_stopped};
-        _Error ->
+            {error, bridge_disabled};
+        {error, bridge_not_found} ->
+            %% race
             {error, bridge_not_found}
     end.
 
@@ -725,9 +739,10 @@ health_check(ConfRootKey, BridgeType, BridgeName) ->
                 ConnectorId, id_with_root_name(ConfRootKey, BridgeType, BridgeName, ConnectorName)
             );
         #{enable := false} ->
-            {error, bridge_stopped};
-        Error ->
-            Error
+            {error, bridge_disabled};
+        {error, bridge_not_found} ->
+            %% race
+            {error, bridge_not_found}
     end.
 
 -spec create_dry_run(bridge_v2_type(), Config :: map()) -> ok | {error, term()}.
@@ -766,7 +781,7 @@ create_dry_run(ConfRootKey, Type, Conf0) ->
     end.
 
 create_dry_run_helper(ConfRootKey, BridgeV2Type, ConnectorRawConf, BridgeV2RawConf) ->
-    BridgeName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
+    BridgeName = ?PROBE_ID_NEW(),
     ConnectorType = connector_type(BridgeV2Type),
     OnReadyCallback =
         fun(ConnectorId) ->
@@ -1693,7 +1708,7 @@ get_conf_root_key(_NoMatch) ->
 
 bridge_v1_create_dry_run(BridgeType, RawConfig0) ->
     RawConf = maps:without([<<"name">>], RawConfig0),
-    TmpName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
+    TmpName = ?PROBE_ID_NEW(),
     PreviousRawConf = undefined,
     try
         #{

+ 16 - 14
apps/emqx_bridge/src/emqx_bridge_v2_api.erl

@@ -796,10 +796,10 @@ handle_list(ConfRootKey) ->
     end.
 
 handle_create(ConfRootKey, Type, Name, Conf0) ->
-    case emqx_bridge_v2:lookup(ConfRootKey, Type, Name) of
-        {ok, _} ->
+    case emqx_bridge_v2:is_exist(ConfRootKey, Type, Name) of
+        true ->
             ?BAD_REQUEST('ALREADY_EXISTS', <<"bridge already exists">>);
-        {error, not_found} ->
+        false ->
             Conf = filter_out_request_body(Conf0),
             create_bridge(ConfRootKey, Type, Name, Conf)
     end.
@@ -808,12 +808,12 @@ handle_update(ConfRootKey, Id, Conf0) ->
     Conf1 = filter_out_request_body(Conf0),
     ?TRY_PARSE_ID(
         Id,
-        case emqx_bridge_v2:lookup(ConfRootKey, BridgeType, BridgeName) of
-            {ok, _} ->
+        case emqx_bridge_v2:is_exist(ConfRootKey, BridgeType, BridgeName) of
+            true ->
                 RawConf = emqx:get_raw_config([ConfRootKey, BridgeType, BridgeName], #{}),
                 Conf = emqx_utils:deobfuscate(Conf1, RawConf),
                 update_bridge(ConfRootKey, BridgeType, BridgeName, Conf);
-            {error, not_found} ->
+            false ->
                 ?BRIDGE_NOT_FOUND(BridgeType, BridgeName)
         end
     ).
@@ -821,8 +821,8 @@ handle_update(ConfRootKey, Id, Conf0) ->
 handle_delete(ConfRootKey, Id, QueryStringOpts) ->
     ?TRY_PARSE_ID(
         Id,
-        case emqx_bridge_v2:lookup(ConfRootKey, BridgeType, BridgeName) of
-            {ok, _} ->
+        case emqx_bridge_v2:is_exist(ConfRootKey, BridgeType, BridgeName) of
+            true ->
                 AlsoDeleteActions =
                     case maps:get(<<"also_delete_dep_actions">>, QueryStringOpts, <<"false">>) of
                         <<"true">> -> true;
@@ -851,7 +851,7 @@ handle_delete(ConfRootKey, Id, QueryStringOpts) ->
                     {error, Reason} ->
                         ?INTERNAL_ERROR(Reason)
                 end;
-            {error, not_found} ->
+            false ->
                 ?BRIDGE_NOT_FOUND(BridgeType, BridgeName)
         end
     ).
@@ -920,7 +920,7 @@ handle_probe(ConfRootKey, Request) ->
     RequestMeta = #{module => ?MODULE, method => post, path => Path},
     case emqx_dashboard_swagger:filter_check_request_and_translate_body(Request, RequestMeta) of
         {ok, #{body := #{<<"type">> := Type} = Params}} ->
-            Params1 = maybe_deobfuscate_bridge_probe(Params),
+            Params1 = maybe_deobfuscate_bridge_probe(ConfRootKey, Params),
             Params2 = maps:remove(<<"type">>, Params1),
             case emqx_bridge_v2:create_dry_run(ConfRootKey, Type, Params2) of
                 ok ->
@@ -942,9 +942,11 @@ handle_probe(ConfRootKey, Request) ->
     end.
 
 %%% API helpers
-maybe_deobfuscate_bridge_probe(#{<<"type">> := ActionType, <<"name">> := BridgeName} = Params) ->
-    case emqx_bridge_v2:lookup(ActionType, BridgeName) of
-        {ok, #{raw_config := RawConf}} ->
+maybe_deobfuscate_bridge_probe(
+    ConfRootKey, #{<<"type">> := ActionType, <<"name">> := BridgeName} = Params
+) ->
+    case emqx_bridge_v2:lookup_raw_conf(ConfRootKey, ActionType, BridgeName) of
+        {ok, RawConf} ->
             %% TODO check if RawConf obtained above is compatible with the commented out code below
             %% RawConf = emqx:get_raw_config([bridges, BridgeType, BridgeName], #{}),
             emqx_utils:deobfuscate(Params, RawConf);
@@ -952,7 +954,7 @@ maybe_deobfuscate_bridge_probe(#{<<"type">> := ActionType, <<"name">> := BridgeN
             %% A bridge may be probed before it's created, so not finding it here is fine
             Params
     end;
-maybe_deobfuscate_bridge_probe(Params) ->
+maybe_deobfuscate_bridge_probe(_ConfRootKey, Params) ->
     Params.
 
 is_ok(ok) ->

+ 2 - 2
apps/emqx_bridge/src/schema/emqx_bridge_schema.erl

@@ -93,7 +93,7 @@ bridge_api_union(Refs) ->
 enterprise_api_schemas(Method) ->
     %% We *must* do this to ensure the module is really loaded, especially when we use
     %% `call_hocon' from `nodetool' to generate initial configurations.
-    _ = emqx_bridge_enterprise:module_info(),
+    ok = emqx_utils:interactive_load(emqx_bridge_enterprise),
     case erlang:function_exported(emqx_bridge_enterprise, api_schemas, 1) of
         true -> emqx_bridge_enterprise:api_schemas(Method);
         false -> []
@@ -102,7 +102,7 @@ enterprise_api_schemas(Method) ->
 enterprise_fields_bridges() ->
     %% We *must* do this to ensure the module is really loaded, especially when we use
     %% `call_hocon' from `nodetool' to generate initial configurations.
-    _ = emqx_bridge_enterprise:module_info(),
+    ok = emqx_utils:interactive_load(emqx_bridge_enterprise),
     case erlang:function_exported(emqx_bridge_enterprise, fields, 1) of
         true -> emqx_bridge_enterprise:fields(bridges);
         false -> []

+ 52 - 0
apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl

@@ -1285,6 +1285,56 @@ t_aggreg_upload_restart_corrupted(TCConfig, Opts) ->
     ),
     ok.
 
+%% Simulates a sequence of requests from the frontend and checks that secrets are
+%% deobfuscated correctly for a connector.  The sequence is simply:
+%%
+%%   1) Create a connector.
+%%   2) Update the connector with the response config.
+%%
+%% This assumes that the response from (1) is already obfuscated.  That is, this doesn't
+%% check that secret fields are correctly marked as such.
+t_deobfuscate_connector(Config) ->
+    ?check_trace(
+        begin
+            #{
+                connector_type := ConnectorType,
+                connector_name := ConnectorName
+            } = get_common_values(Config),
+            OriginalConnectorConfig = get_value(connector_config, Config),
+            {201, Response} = simplify_result(create_connector_api(Config)),
+            %% Sanity check
+            ?assertEqual(
+                OriginalConnectorConfig,
+                emqx_config:get_raw([<<"connectors">>, bin(ConnectorType), bin(ConnectorName)])
+            ),
+            ConnectorConfig = maps:without(
+                [
+                    <<"name">>,
+                    <<"actions">>,
+                    <<"sources">>,
+                    <<"node_status">>,
+                    <<"status">>,
+                    <<"type">>
+                ],
+                Response
+            ),
+            ?assertMatch(
+                {200, _},
+                simplify_result(
+                    update_connector_api(ConnectorName, ConnectorType, ConnectorConfig)
+                )
+            ),
+            %% Even if the request is accepted, shouldn't clobber secrets
+            ?assertEqual(
+                OriginalConnectorConfig,
+                emqx_config:get_raw([<<"connectors">>, bin(ConnectorType), bin(ConnectorName)])
+            ),
+            ok
+        end,
+        []
+    ),
+    ok.
+
 snk_timetrap() ->
     {CTTimetrap, _} = ct:get_timetrap_info(),
     #{timetrap => max(0, CTTimetrap - 1_000)}.
@@ -1302,3 +1352,5 @@ proplist_update(Proplist, K, Fn) ->
     {K, OldV} = lists:keyfind(K, 1, Proplist),
     NewV = Fn(OldV),
     lists:keystore(K, 1, Proplist, {K, NewV}).
+
+bin(X) -> emqx_utils_conv:bin(X).

+ 16 - 11
apps/emqx_bridge_azure_blob_storage/src/emqx_bridge_azure_blob_storage_connector.erl

@@ -4,6 +4,8 @@
 
 -module(emqx_bridge_azure_blob_storage_connector).
 
+-feature(maybe_expr, enable).
+
 -behaviour(emqx_resource).
 -behaviour(emqx_connector_aggreg_delivery).
 -behaviour(emqx_template).
@@ -160,17 +162,20 @@ on_start(_ConnResId, ConnConfig) ->
         account_name := AccountName,
         account_key := AccountKey
     } = ConnConfig,
-    Endpoint = maps:get(endpoint, ConnConfig, undefined),
-    {ok, DriverState} = erlazure:new(#{
-        account => AccountName,
-        key => AccountKey,
-        endpoint => Endpoint
-    }),
-    State = #{
-        driver_state => DriverState,
-        installed_actions => #{}
-    },
-    {ok, State}.
+    maybe
+        ok ?= emqx_bridge_azure_blob_storage_connector_schema:validate_account_key(AccountKey),
+        Endpoint = maps:get(endpoint, ConnConfig, undefined),
+        {ok, DriverState} = erlazure:new(#{
+            account => AccountName,
+            key => AccountKey,
+            endpoint => Endpoint
+        }),
+        State = #{
+            driver_state => DriverState,
+            installed_actions => #{}
+        },
+        {ok, State}
+    end.
 
 -spec on_stop(connector_resource_id(), connector_state()) -> ok.
 on_stop(_ConnResId, _ConnState) ->

+ 23 - 5
apps/emqx_bridge_azure_blob_storage/src/emqx_bridge_azure_blob_storage_connector_schema.erl

@@ -23,6 +23,9 @@
     connector_examples/1
 ]).
 
+%% Internal exports
+-export([validate_account_key/1]).
+
 %% API
 -export([]).
 
@@ -138,16 +141,31 @@ connector_example(put) ->
 %%------------------------------------------------------------------------------
 
 %%------------------------------------------------------------------------------
-%% Internal fns
+%% Internal exports
 %%------------------------------------------------------------------------------
 
-mk(Type, Meta) -> hoconsc:mk(Type, Meta).
-
-account_key_validator(Val) ->
+validate_account_key(Val) ->
     try
         _ = base64:decode(emqx_secret:unwrap(Val)),
         ok
     catch
         _:_ ->
-            {error, <<"bad account key">>}
+            {error, <<"bad account key; must be a valid base64 encoded value">>}
+    end.
+
+%%------------------------------------------------------------------------------
+%% Internal fns
+%%------------------------------------------------------------------------------
+
+mk(Type, Meta) -> hoconsc:mk(Type, Meta).
+
+account_key_validator(Val) ->
+    case emqx_secret:unwrap(Val) of
+        <<"******">> ->
+            %% The frontend sends obfuscated values when updating a connector...  So we
+            %% cannot distinguish an obfuscated value from an user explicitly setting this
+            %% field to this value.
+            ok;
+        Key ->
+            validate_account_key(Key)
     end.

+ 28 - 2
apps/emqx_bridge_azure_blob_storage/test/emqx_bridge_azure_blob_storage_SUITE.erl

@@ -683,7 +683,7 @@ t_bad_account_key(Config) ->
                 {400, #{
                     <<"message">> := #{
                         <<"kind">> := <<"validation_error">>,
-                        <<"reason">> := <<"bad account key">>
+                        <<"reason">> := <<"bad account key", _/binary>>
                     }
                 }},
                 emqx_bridge_v2_testlib:simplify_result(
@@ -697,7 +697,7 @@ t_bad_account_key(Config) ->
                 {400, #{
                     <<"message">> := #{
                         <<"kind">> := <<"validation_error">>,
-                        <<"reason">> := <<"bad account key">>
+                        <<"reason">> := <<"bad account key", _/binary>>
                     }
                 }},
                 emqx_bridge_v2_testlib:simplify_result(
@@ -734,3 +734,29 @@ t_bad_account_name(Config) ->
         []
     ),
     ok.
+
+t_deobfuscate_connector(Config) ->
+    emqx_bridge_v2_testlib:?FUNCTION_NAME(Config).
+
+%% Checks that we verify at runtime that the provided account key is a valid base64 string.
+t_create_connector_with_obfuscated_key(Config0) ->
+    ?check_trace(
+        begin
+            RedactedValue = <<"******">>,
+            Config = emqx_bridge_v2_testlib:proplist_update(Config0, connector_config, fun(Old) ->
+                Old#{<<"account_key">> := RedactedValue}
+            end),
+            ?assertMatch(
+                {201, #{
+                    <<"status">> := <<"disconnected">>,
+                    <<"status_reason">> := <<"bad account key", _/binary>>
+                }},
+                emqx_bridge_v2_testlib:simplify_result(
+                    emqx_bridge_v2_testlib:create_connector_api(Config)
+                )
+            ),
+            ok
+        end,
+        []
+    ),
+    ok.

+ 1 - 1
apps/emqx_bridge_azure_event_hub/rebar.config

@@ -2,7 +2,7 @@
 
 {erl_opts, [debug_info]}.
 {deps, [
-    {wolff, "3.0.4"},
+    {wolff, "4.0.0"},
     {kafka_protocol, "4.1.8"},
     {brod_gssapi, "0.1.3"},
     {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

+ 1 - 1
apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src

@@ -1,6 +1,6 @@
 {application, emqx_bridge_azure_event_hub, [
     {description, "EMQX Enterprise Azure Event Hub Bridge"},
-    {vsn, "0.1.8"},
+    {vsn, "0.2.0"},
     {registered, []},
     {applications, [
         kernel,

+ 2 - 0
apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl

@@ -289,6 +289,8 @@ values(producer) ->
                 key => <<"${.clientid}">>,
                 value => <<"${.}">>
             },
+            max_linger_time => <<"5ms">>,
+            max_linger_bytes => <<"10MB">>,
             max_batch_bytes => <<"896KB">>,
             partition_strategy => <<"random">>,
             required_acks => <<"all_isr">>,

+ 4 - 1
apps/emqx_bridge_cassandra/test/emqx_bridge_cassandra_SUITE.erl

@@ -644,16 +644,19 @@ t_missing_data(Config) ->
     %% emqx_bridge_cassandra_connector will send missed data as a `null` atom
     %% to ecql driver
     ?check_trace(
+        #{timetrap => 10_000},
         begin
             {_, {ok, _}} =
                 ?wait_async_action(
                     send_message(Config, #{}),
                     #{?snk_kind := handle_async_reply, result := {error, {8704, _}}},
-                    30_000
+                    5_000
                 ),
+            ?block_until(#{?snk_kind := cassandra_connector_query_return}),
             ok
         end,
         fun(Trace0) ->
+            ct:pal("trace:\n  ~p", [Trace0]),
             %% 1. ecql driver will return `ok` first in async query
             Trace = ?of_kind(cassandra_connector_query_return, Trace0),
             ?assertMatch([#{result := {ok, _Pid}}], Trace),

+ 1 - 1
apps/emqx_bridge_confluent/rebar.config

@@ -2,7 +2,7 @@
 
 {erl_opts, [debug_info]}.
 {deps, [
-    {wolff, "3.0.4"},
+    {wolff, "4.0.0"},
     {kafka_protocol, "4.1.8"},
     {brod_gssapi, "0.1.3"},
     {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

+ 1 - 1
apps/emqx_bridge_confluent/src/emqx_bridge_confluent.app.src

@@ -1,6 +1,6 @@
 {application, emqx_bridge_confluent, [
     {description, "EMQX Enterprise Confluent Connector and Action"},
-    {vsn, "0.1.3"},
+    {vsn, "0.2.0"},
     {registered, []},
     {applications, [
         kernel,

+ 2 - 0
apps/emqx_bridge_confluent/src/emqx_bridge_confluent_producer.erl

@@ -251,6 +251,8 @@ values(action) ->
                 key => <<"${.clientid}">>,
                 value => <<"${.}">>
             },
+            max_linger_time => <<"5ms">>,
+            max_linger_bytes => <<"10MB">>,
             max_batch_bytes => <<"896KB">>,
             partition_strategy => <<"random">>,
             required_acks => <<"all_isr">>,

+ 10 - 18
apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl

@@ -753,11 +753,11 @@ process_request_and_action(Request, ActionState, Msg) ->
             _ -> join_paths(PathPrefix, PathSuffix)
         end,
 
-    HeadersTemplate1 = maps:get(headers, Request),
-    HeadersTemplate2 = maps:get(headers, ActionState),
-    Headers = merge_proplist(
-        render_headers(HeadersTemplate1, RenderTmplFunc, Msg),
-        render_headers(HeadersTemplate2, RenderTmplFunc, Msg)
+    ActionHaders = maps:get(headers, ActionState),
+    BaseHeaders = maps:get(headers, Request),
+    Headers = merge_headers(
+        render_headers(ActionHaders, RenderTmplFunc, Msg),
+        render_headers(BaseHeaders, RenderTmplFunc, Msg)
     ),
     BodyTemplate = maps:get(body, ActionState),
     Body = render_request_body(BodyTemplate, RenderTmplFunc, Msg),
@@ -769,19 +769,11 @@ process_request_and_action(Request, ActionState, Msg) ->
         request_timeout => maps:get(request_timeout, ActionState)
     }.
 
-merge_proplist(Proplist1, Proplist2) ->
-    lists:foldl(
-        fun({K, V}, Acc) ->
-            case lists:keyfind(K, 1, Acc) of
-                false ->
-                    [{K, V} | Acc];
-                {K, _} = {K, V1} ->
-                    [{K, V1} | Acc]
-            end
-        end,
-        Proplist2,
-        Proplist1
-    ).
+merge_headers([], Result) ->
+    Result;
+merge_headers([{K, V} | Rest], Result) ->
+    R = lists:keydelete(K, 1, Result),
+    merge_headers(Rest, [{K, V} | R]).
 
 process_request(
     #{

+ 0 - 1
apps/emqx_bridge_http/src/emqx_bridge_http_connector_info.erl

@@ -41,7 +41,6 @@ config_schema() ->
         hoconsc:mk(
             hoconsc:map(name, hoconsc:ref(emqx_bridge_http_schema, "config_connector")),
             #{
-                alias => [webhook],
                 desc => <<"HTTP Connector Config">>,
                 required => false
             }

+ 144 - 55
apps/emqx_bridge_http/test/emqx_bridge_http_v2_SUITE.erl

@@ -126,62 +126,148 @@ t_compose_connector_url_and_action_path(Config) ->
 %% Checks that we can successfully update a connector containing sensitive headers and
 %% they won't be clobbered by the update.
 t_update_with_sensitive_data(Config) ->
-    ?check_trace(
-        begin
-            ConnectorCfg0 = make_connector_config(Config),
-            AuthHeader = <<"Bearer some_token">>,
-            ConnectorCfg1 = emqx_utils_maps:deep_merge(
-                ConnectorCfg0,
-                #{<<"headers">> => #{<<"authorization">> => AuthHeader}}
-            ),
-            ActionCfg = make_action_config(Config),
-            CreateConfig = [
-                {bridge_kind, action},
-                {action_type, ?BRIDGE_TYPE},
-                {action_name, ?BRIDGE_NAME},
-                {action_config, ActionCfg},
-                {connector_type, ?BRIDGE_TYPE},
-                {connector_name, ?CONNECTOR_NAME},
-                {connector_config, ConnectorCfg1}
-            ],
-            {ok, {{_, 201, _}, _, #{<<"headers">> := #{<<"authorization">> := Obfuscated}}}} =
-                emqx_bridge_v2_testlib:create_connector_api(CreateConfig),
-            {ok, _} =
-                emqx_bridge_v2_testlib:create_kind_api(CreateConfig),
-            BridgeId = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME),
-            {ok, _} = emqx_bridge_v2_testlib:create_rule_api(
-                #{
-                    sql => <<"select * from \"t/http\" ">>,
-                    actions => [BridgeId]
+    ConnectorCfg0 = make_connector_config(Config),
+    AuthHeader = <<"Bearer some_token">>,
+    ConnectorCfg1 = emqx_utils_maps:deep_merge(
+        ConnectorCfg0,
+        #{
+            <<"headers">> => #{
+                <<"authorization">> => AuthHeader,
+                <<"x-test-header">> => <<"from-connector">>
+            }
+        }
+    ),
+    ActionCfg = make_action_config(Config, #{<<"x-test-header">> => <<"from-action">>}),
+    CreateConfig = [
+        {bridge_kind, action},
+        {action_type, ?BRIDGE_TYPE},
+        {action_name, ?BRIDGE_NAME},
+        {action_config, ActionCfg},
+        {connector_type, ?BRIDGE_TYPE},
+        {connector_name, ?CONNECTOR_NAME},
+        {connector_config, ConnectorCfg1}
+    ],
+    {ok, {{_, 201, _}, _, #{<<"headers">> := #{<<"authorization">> := Obfuscated}}}} =
+        emqx_bridge_v2_testlib:create_connector_api(CreateConfig),
+    {ok, _} =
+        emqx_bridge_v2_testlib:create_kind_api(CreateConfig),
+    BridgeId = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME),
+    {ok, _} = emqx_bridge_v2_testlib:create_rule_api(
+        #{
+            sql => <<"select * from \"t/http\" ">>,
+            actions => [BridgeId]
+        }
+    ),
+    emqx:publish(emqx_message:make(<<"t/http">>, <<"1">>)),
+    ?assertReceive(
+        {http,
+            #{
+                <<"authorization">> := AuthHeader,
+                <<"x-test-header">> := <<"from-action">>
+            },
+            _}
+    ),
+
+    %% Now update the connector and see if the header stays deobfuscated.  We send the old
+    %% auth header as an obfuscated value to simulate the behavior of the frontend.
+    ConnectorCfg2 = emqx_utils_maps:deep_merge(
+        ConnectorCfg1,
+        #{
+            <<"headers">> => #{
+                <<"authorization">> => Obfuscated,
+                <<"x-test-header">> => <<"from-connector-new">>,
+                <<"x-test-header-2">> => <<"from-connector-new">>,
+                <<"other_header">> => <<"new">>
+            }
+        }
+    ),
+    {ok, _} = emqx_bridge_v2_testlib:update_connector_api(
+        ?CONNECTOR_NAME,
+        ?BRIDGE_TYPE,
+        ConnectorCfg2
+    ),
+
+    emqx:publish(emqx_message:make(<<"t/http">>, <<"2">>)),
+    %% Should not be obfuscated.
+    ?assertReceive(
+        {http,
+            #{
+                <<"authorization">> := AuthHeader,
+                <<"x-test-header">> := <<"from-action">>,
+                <<"x-test-header-2">> := <<"from-connector-new">>
+            },
+            _},
+        2_000
+    ),
+    ok.
+
+t_disable_action_counters(Config) ->
+    ConnectorCfg = make_connector_config(Config),
+    ActionCfg = make_action_config(Config),
+    CreateConfig = [
+        {bridge_kind, action},
+        {action_type, ?BRIDGE_TYPE},
+        {action_name, ?BRIDGE_NAME},
+        {action_config, ActionCfg},
+        {connector_type, ?BRIDGE_TYPE},
+        {connector_name, ?CONNECTOR_NAME},
+        {connector_config, ConnectorCfg}
+    ],
+    {ok, {{_, 201, _}, _, _}} =
+        emqx_bridge_v2_testlib:create_connector_api(CreateConfig),
+    {ok, _} =
+        emqx_bridge_v2_testlib:create_kind_api(CreateConfig),
+    BridgeId = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME),
+    {ok, Rule} = emqx_bridge_v2_testlib:create_rule_api(
+        #{
+            sql => <<"select * from \"t/http\" ">>,
+            actions => [BridgeId]
+        }
+    ),
+    {{_, 201, _}, _, #{<<"id">> := RuleId}} = Rule,
+    emqx:publish(emqx_message:make(<<"t/http">>, <<"1">>)),
+    ?assertReceive({http_server, received, _}, 2_000),
+
+    ?retry(
+        _Interval = 500,
+        _NAttempts = 20,
+        ?assertMatch(
+            #{
+                counters := #{
+                    'matched' := 1,
+                    'actions.failed' := 0,
+                    'actions.failed.unknown' := 0,
+                    'actions.success' := 1,
+                    'actions.total' := 1,
+                    'actions.discarded' := 0
                 }
-            ),
-            emqx:publish(emqx_message:make(<<"t/http">>, <<"1">>)),
-            ?assertReceive({http, #{<<"authorization">> := AuthHeader}, _}),
-
-            %% Now update the connector and see if the header stays deobfuscated.  We send the old
-            %% auth header as an obfuscated value to simulate the behavior of the frontend.
-            ConnectorCfg2 = emqx_utils_maps:deep_merge(
-                ConnectorCfg1,
-                #{
-                    <<"headers">> => #{
-                        <<"authorization">> => Obfuscated,
-                        <<"other_header">> => <<"new">>
-                    }
+            },
+            emqx_metrics_worker:get_metrics(rule_metrics, RuleId)
+        )
+    ),
+
+    %% disable the action
+    {ok, {{_, 200, _}, _, _}} =
+        emqx_bridge_v2_testlib:update_bridge_api(CreateConfig, #{<<"enable">> => false}),
+
+    %% this will trigger a discard
+    emqx:publish(emqx_message:make(<<"t/http">>, <<"2">>)),
+    ?retry(
+        _Interval = 500,
+        _NAttempts = 20,
+        ?assertMatch(
+            #{
+                counters := #{
+                    'matched' := 2,
+                    'actions.failed' := 0,
+                    'actions.failed.unknown' := 0,
+                    'actions.success' := 1,
+                    'actions.total' := 2,
+                    'actions.discarded' := 1
                 }
-            ),
-            {ok, _} = emqx_bridge_v2_testlib:update_connector_api(
-                ?CONNECTOR_NAME,
-                ?BRIDGE_TYPE,
-                ConnectorCfg2
-            ),
-
-            emqx:publish(emqx_message:make(<<"t/http">>, <<"2">>)),
-            %% Should not be obfuscated.
-            ?assertReceive({http, #{<<"authorization">> := AuthHeader}, _}, 2_000),
-
-            ok
-        end,
-        []
+            },
+            emqx_metrics_worker:get_metrics(rule_metrics, RuleId)
+        )
     ),
 
     ok.
@@ -204,6 +290,9 @@ make_connector_config(Config) ->
     }.
 
 make_action_config(Config) ->
+    make_action_config(Config, _Headers = #{}).
+
+make_action_config(Config, Headers) ->
     Path = ?config(path, Config),
     #{
         <<"enable">> => true,
@@ -211,7 +300,7 @@ make_action_config(Config) ->
         <<"parameters">> => #{
             <<"path">> => Path,
             <<"method">> => <<"post">>,
-            <<"headers">> => #{},
+            <<"headers">> => Headers,
             <<"body">> => <<"${.}">>
         },
         <<"resource_opts">> => #{

+ 28 - 4
apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb_connector.erl

@@ -75,6 +75,7 @@
 
 -define(CONNECTOR_TYPE, iotdb).
 -define(IOTDB_PING_PATH, <<"ping">>).
+-define(DEFAULT_THRIFT_TIMEOUT, timer:seconds(10)).
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
 
@@ -187,6 +188,22 @@ fields("config_thrift") ->
                         default => 8,
                         desc => ?DESC("pool_size")
                     }
+                )},
+            {connect_timeout,
+                mk(
+                    emqx_schema:timeout_duration_ms(),
+                    #{
+                        default => <<"10s">>,
+                        desc => ?DESC("connect_timeout")
+                    }
+                )},
+            {recv_timeout,
+                mk(
+                    emqx_schema:timeout_duration_ms(),
+                    #{
+                        default => <<"10s">>,
+                        desc => ?DESC("recv_timeout")
+                    }
                 )}
         ] ++ fields(authentication) ++ emqx_connector_schema_lib:ssl_fields() ++
         emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts);
@@ -317,22 +334,29 @@ on_start(
 
     #{hostname := Host, port := Port} = emqx_schema:parse_server(Server, ?THRIFT_HOST_OPTIONS),
 
-    TransportOpts =
+    DriverOpts = maps:merge(
+        #{
+            connect_timeout => ?DEFAULT_THRIFT_TIMEOUT, recv_timeout => ?DEFAULT_THRIFT_TIMEOUT
+        },
+        maps:with([connect_timeout, recv_timeout], Config)
+    ),
+
+    DriverOpts1 =
         case maps:get(enable, SSL) of
             true ->
-                #{
+                DriverOpts#{
                     ssltransport => true,
                     ssloptions => emqx_tls_lib:to_client_opts(SSL)
                 };
             false ->
-                #{}
+                DriverOpts
         end,
 
     IoTDBOpts = IoTDBOpts0#{
         version => Version,
         host => Host,
         port => Port,
-        options => TransportOpts
+        options => DriverOpts1
     },
 
     Options = [

+ 2 - 0
apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb_connector_info.erl

@@ -57,6 +57,8 @@ driver_union_selector({value, Value}) ->
     case Value of
         #{<<"driver">> := <<"thrift">>} ->
             [ref(?DRIVER_THRIFT, "config")];
+        #{<<"driver">> := thrift} ->
+            [ref(?DRIVER_THRIFT, "config")];
         _ ->
             [ref(?DRIVER_REST, "config")]
     end.

+ 1 - 1
apps/emqx_bridge_kafka/rebar.config

@@ -2,7 +2,7 @@
 
 {erl_opts, [debug_info]}.
 {deps, [
-    {wolff, "3.0.4"},
+    {wolff, "4.0.0"},
     {kafka_protocol, "4.1.8"},
     {brod_gssapi, "0.1.3"},
     {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

+ 1 - 1
apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge_kafka, [
     {description, "EMQX Enterprise Kafka Bridge"},
-    {vsn, "0.4.0"},
+    {vsn, "0.5.0"},
     {registered, [emqx_bridge_kafka_consumer_sup]},
     {applications, [
         kernel,

+ 14 - 2
apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl

@@ -175,6 +175,8 @@ values(producer_values) ->
             value => <<"${.}">>,
             timestamp => <<"${.timestamp}">>
         },
+        max_linger_time => <<"5ms">>,
+        max_linger_bytes => <<"10MB">>,
         max_batch_bytes => <<"896KB">>,
         compression => <<"no_compression">>,
         partition_strategy => <<"random">>,
@@ -197,7 +199,7 @@ values(producer_values) ->
         buffer => #{
             mode => <<"hybrid">>,
             per_partition_limit => <<"2GB">>,
-            segment_bytes => <<"100MB">>,
+            segment_bytes => <<"10MB">>,
             memory_overload_protection => true
         }
     };
@@ -385,6 +387,16 @@ fields(producer_kafka_opts) ->
     [
         {topic, mk(emqx_schema:template(), #{required => true, desc => ?DESC(kafka_topic)})},
         {message, mk(ref(kafka_message), #{required => false, desc => ?DESC(kafka_message)})},
+        {max_linger_time,
+            mk(emqx_schema:timeout_duration_ms(), #{
+                default => <<"0ms">>,
+                desc => ?DESC(max_linger_time)
+            })},
+        {max_linger_bytes,
+            mk(emqx_schema:bytesize(), #{
+                default => <<"10MB">>,
+                desc => ?DESC(max_linger_bytes)
+            })},
         {max_batch_bytes,
             mk(emqx_schema:bytesize(), #{default => <<"896KB">>, desc => ?DESC(max_batch_bytes)})},
         {compression,
@@ -525,7 +537,7 @@ fields(producer_buffer) ->
         {segment_bytes,
             mk(
                 emqx_schema:bytesize(),
-                #{default => <<"100MB">>, desc => ?DESC(buffer_segment_bytes)}
+                #{default => <<"10MB">>, desc => ?DESC(buffer_segment_bytes)}
             )},
         {memory_overload_protection,
             mk(boolean(), #{

+ 7 - 5
apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl

@@ -532,17 +532,15 @@ do_send_msg(sync, KafkaTopic, KafkaMessage, Producers, SyncTimeout) ->
             {error, timeout}
     end;
 do_send_msg(async, KafkaTopic, KafkaMessage, Producers, AsyncReplyFn) ->
-    %% * Must be a batch because wolff:send and wolff:send_sync are batch APIs
+    %% * Must be a batch because wolff send and cast are batch APIs
     %% * Must be a single element batch because wolff books calls, but not batch sizes
     %%   for counters and gauges.
     Batch = [KafkaMessage],
-    %% The retuned information is discarded here.
-    %% If the producer process is down when sending, this function would
-    %% raise an error exception which is to be caught by the caller of this callback
     {_Partition, Pid} = wolff:send2(
         Producers, KafkaTopic, Batch, {fun ?MODULE:on_kafka_ack/3, [AsyncReplyFn]}
     ),
-    %% this Pid is so far never used because Kafka producer is by-passing the buffer worker
+    %% This Pid is returned, but not monitored by caller
+    %% See emqx_resource_buffer_worker:simple_async_internal_buffer
     {ok, Pid}.
 
 %% Wolff producer never gives up retrying
@@ -743,6 +741,8 @@ ssl(_) ->
 
 producers_config(BridgeType, BridgeName, Input, IsDryRun, ActionResId) ->
     #{
+        max_linger_time := MaxLingerTime,
+        max_linger_bytes := MaxLingerBytes,
         max_batch_bytes := MaxBatchBytes,
         compression := Compression,
         partition_strategy := PartitionStrategy,
@@ -780,6 +780,8 @@ producers_config(BridgeType, BridgeName, Input, IsDryRun, ActionResId) ->
         replayq_seg_bytes => SegmentBytes,
         drop_if_highmem => MemOLP,
         required_acks => RequiredAcks,
+        max_linger_ms => MaxLingerTime,
+        max_linger_bytes => MaxLingerBytes,
         max_batch_bytes => MaxBatchBytes,
         max_send_ahead => MaxInflight - 1,
         compression => Compression,

+ 3 - 10
apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl

@@ -234,14 +234,6 @@ t_rest_api(Config) ->
     },
     ok = kafka_bridge_rest_api_helper(Cfg).
 
-%% So that we can check if new atoms are created when they are not supposed to be created
-pre_create_atoms() ->
-    [
-        kafka_producer__probe_,
-        probedryrun,
-        kafka__probe_
-    ].
-
 http_get_bridges(UrlPath, Name0) ->
     Name = iolist_to_binary(Name0),
     {ok, _Code, BridgesData} = http_get(UrlPath),
@@ -307,7 +299,6 @@ kafka_bridge_rest_api_helper(Config) ->
         ?assertMatch([#{<<"type">> := <<"kafka">>}], http_get_bridges(BridgesParts, BridgeName)),
         %% Probe should work
         %% no extra atoms should be created when probing
-        %% See pre_create_atoms() above
         AtomsBefore = erlang:system_info(atom_count),
         {ok, 204, _} = http_post(BridgesProbeParts, CreateBody),
         AtomsAfter = erlang:system_info(atom_count),
@@ -358,6 +349,7 @@ kafka_bridge_rest_api_helper(Config) ->
         ?assertEqual(1, emqx_resource_metrics:success_get(BridgeV2Id)),
         ?assertEqual(1, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.success')),
         ?assertEqual(0, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.failed')),
+        ?assertEqual(0, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.discarded')),
         ?assertEqual(0, emqx_resource_metrics:dropped_get(BridgeV2Id)),
         ?assertEqual(0, emqx_resource_metrics:failed_get(BridgeV2Id)),
         ?assertEqual(0, emqx_resource_metrics:inflight_get(BridgeV2Id)),
@@ -377,7 +369,8 @@ kafka_bridge_rest_api_helper(Config) ->
         timer:sleep(100),
         ?assertEqual(0, emqx_resource_metrics:success_get(BridgeV2Id)),
         ?assertEqual(1, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.success')),
-        ?assertEqual(1, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.failed')),
+        ?assertEqual(0, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.failed')),
+        ?assertEqual(1, emqx_metrics_worker:get(rule_metrics, RuleId, 'actions.discarded')),
         {ok, 204, _} = http_put(BridgesPartsOpDisable, #{}),
         {ok, 204, _} = http_put(BridgesPartsOpEnable, #{}),
         ?assertEqual(0, emqx_resource_metrics:success_get(BridgeV2Id)),

+ 2 - 0
apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl

@@ -229,6 +229,8 @@ bridge_v2_config(ConnectorName, KafkaTopic) ->
             },
             <<"compression">> => <<"no_compression">>,
             <<"kafka_header_value_encode_mode">> => <<"none">>,
+            <<"max_linger_time">> => <<"0ms">>,
+            <<"max_linger_bytes">> => <<"10MB">>,
             <<"max_batch_bytes">> => <<"896KB">>,
             <<"max_inflight">> => 10,
             <<"message">> => #{

+ 1 - 1
apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge_mqtt, [
     {description, "EMQX MQTT Broker Bridge"},
-    {vsn, "0.2.4"},
+    {vsn, "0.2.5"},
     {registered, []},
     {applications, [
         kernel,

+ 1 - 1
apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector_schema.erl

@@ -127,7 +127,7 @@ fields("server_configs") ->
                     desc => ?DESC("clean_start")
                 }
             )},
-        {keepalive, mk_duration("MQTT Keepalive.", #{default => <<"300s">>})},
+        {keepalive, mk_duration("MQTT Keepalive.", #{default => <<"160s">>})},
         {retry_interval,
             mk_duration(
                 "Message retry interval. Delay for the MQTT bridge to retry sending the QoS1/QoS2 "

+ 112 - 103
apps/emqx_bridge_snowflake/docs/dev-quick-ref.md

@@ -32,6 +32,13 @@ Driver = /opt/snowflake/snowflakeodbc/lib/universal/libSnowflake.dylib
 EOF
 ```
 
+## Generate RSA key pair for user accounts
+
+```sh
+openssl genrsa 2048 | openssl pkcs8 -topk8 -inform PEM -out snowflake_rsa_key.private.pem -nocrypt
+openssl rsa -in snowflake_rsa_key.private.pem -pubout -out snowflake_rsa_key.public.pem
+```
+
 ## Basic helper functions
 
 ### Elixir
@@ -67,58 +74,17 @@ DSN = "snowflake".
 Query = fun(Conn, Sql) -> odbc:sql_query(Conn, Sql) end.
 ```
 
-## Create user
-
-### Shell
-
-```sh
-openssl genrsa 2048 | openssl pkcs8 -topk8 -inform PEM -out snowflake_rsa_key.private.pem -nocrypt
-openssl rsa -in snowflake_rsa_key.private.pem -pubout -out snowflake_rsa_key.public.pem
-```
-
-### Elixir
-
-```elixir
-test_user = "testuser"
-query.(conn, "create user #{test_user} password = 'TestUser99' must_change_password = false")
-# {:updated, :undefined}
-
-public_pem_contents_trimmed = File.read!("snowflake_rsa_key.public.pem") |> String.trim() |> String.split("\n") |> Enum.drop(1) |> Enum.drop(-1) |> Enum.join("\n")
-
-query.(conn, "alter user #{test_user} set rsa_public_key = '#{public_pem_contents_trimmed}'")
-# {:updated, :undefined}
-```
-
-### Erlang
-
-```erlang
-TestUser = "testuser".
-Query(Conn, ["create user ", TestUser, " password = 'TestUser99' must_change_password = false"]).
-# {updated,undefined}
-
-{ok, Bin} = file:read_file("snowflake_rsa_key.public.pem").
-Pem = binary_to_list(Bin).
-[_ | Lines] = string:split(string:trim(Pem), "\n", all).
-PublicPemContentsTrimmed = lists:join("\n", lists:droplast(Lines)).
-
-Query(Conn, ["alter user ", TestUser, " set rsa_public_key = '", PublicPemContentsTrimmed, "'"]).
-# {updated,undefined}
-```
-
-## Create database objects
+## Initialize Database and user accounts
 
 ### Elixir
 
 ```elixir
 database = "testdatabase"
 schema = "public"
-table = "test1"
+table = "test0"
 stage = "teststage0"
 pipe = "testpipe0"
 warehouse = "testwarehouse"
-snowpipe_role = "snowpipe1"
-snowpipe_user = "snowpipeuser"
-test_role = "testrole"
 fqn_table = "#{database}.#{schema}.#{table}"
 fqn_stage = "#{database}.#{schema}.#{stage}"
 fqn_pipe = "#{database}.#{schema}.#{pipe}"
@@ -132,99 +98,142 @@ query.(conn, "create stage if not exists #{fqn_stage} file_format = (type = csv
 query.(conn, "create pipe if not exists #{fqn_pipe} as copy into #{fqn_table} from @#{fqn_stage} match_by_column_name = case_insensitive")
 query.(conn, "create or replace warehouse #{warehouse}")
 
+# read public key contents
+public_pem_contents_trimmed = File.read!("snowflake_rsa_key.public.pem") |> String.trim() |> String.split("\n") |> Enum.drop(1) |> Enum.drop(-1) |> Enum.join("\n")
+
+# create user account for running local tests
+test_role = "testrole"
+test_user = "testuser"
+
+query.(conn, "create user #{test_user} password = 'TestUser99' must_change_password = false rsa_public_key = '#{public_pem_contents_trimmed}'")
+# {:updated, :undefined}
+
 # Create a role for the Snowpipe privileges.
-query.(conn, "create or replace role #{snowpipe_role}")
 query.(conn, "create or replace role #{test_role}")
 # Grant the USAGE privilege on the database and schema that contain the pipe object.
-query.(conn, "grant usage on database #{database} to role #{snowpipe_role}")
 query.(conn, "grant usage on database #{database} to role #{test_role}")
-query.(conn, "grant usage on schema #{database}.#{schema} to role #{snowpipe_role}")
 query.(conn, "grant usage on schema #{database}.#{schema} to role #{test_role}")
-# Grant the INSERT and SELECT privileges on the target table.
-query.(conn, "grant insert, select on #{fqn_table} to role #{snowpipe_role}")
-# for cleaning up table after tests
+# Grant the USAGE privilege on the warehouse (only needed for test account)
+query.(conn, "grant usage on warehouse #{warehouse} to role #{test_role}")
+# Grant the INSERT, SELECT, TRUNCATE and DELETE privileges on the target table
+# for cleaning up after tests
 query.(conn, "grant insert, select, truncate, delete on #{fqn_table} to role #{test_role}")
-# Grant the USAGE privilege on the external stage.
-# must use read/write for internal stage
-# query.(conn, "grant usage on stage #{fqn_stage} to role #{snowpipe_role}")
-query.(conn, "grant read, write on stage #{fqn_stage} to role #{snowpipe_role}")
-# for cleaning up table after tests
+# Grant the READ and WRITE privilege on the internal stage.
 query.(conn, "grant read, write on stage #{fqn_stage} to role #{test_role}")
 # Grant the OPERATE and MONITOR privileges on the pipe object.
-query.(conn, "grant operate, monitor on pipe #{fqn_pipe} to role #{snowpipe_role}")
+query.(conn, "grant operate, monitor on pipe #{fqn_pipe} to role #{test_role}")
 # Grant the role to a user
-query.(conn, "create user if not exists #{snowpipe_user} password = 'TestUser99' must_change_password = false rsa_public_key = '#{public_pem_contents_trimmed}'")
+query.(conn, "grant role #{test_role} to user #{test_user}")
+# Set the role as the default role for the user
+query.(conn, "alter user #{test_user} set default_role = #{test_role}")
 
-query.(conn, "grant usage on warehouse #{warehouse} to role #{test_role}")
+# create user account for connector and action
+snowpipe_role = "snowpipe"
+snowpipe_user = "snowpipeuser"
+
+query.(conn, "create user if not exists #{snowpipe_user} password = 'TestUser99' must_change_password = false rsa_public_key = '#{public_pem_contents_trimmed}'")
 
+# Create a role for the Snowpipe privileges.
+query.(conn, "create or replace role #{snowpipe_role}")
+# Grant the USAGE privilege on the database and schema that contain the pipe object.
+query.(conn, "grant usage on database #{database} to role #{snowpipe_role}")
+query.(conn, "grant usage on schema #{database}.#{schema} to role #{snowpipe_role}")
+# Grant the INSERT and SELECT privileges on the target table.
+query.(conn, "grant insert, select on #{fqn_table} to role #{snowpipe_role}")
+# Grant the READ and WRITE privilege on the internal stage.
+query.(conn, "grant read, write on stage #{fqn_stage} to role #{snowpipe_role}")
+# Grant the OPERATE and MONITOR privileges on the pipe object.
+query.(conn, "grant operate, monitor on pipe #{fqn_pipe} to role #{snowpipe_role}")
+# Grant the role to a user
 query.(conn, "grant role #{snowpipe_role} to user #{snowpipe_user}")
-query.(conn, "grant role #{snowpipe_role} to user #{test_user}")
-query.(conn, "grant role #{test_role} to user #{test_user}")
 # Set the role as the default role for the user
 query.(conn, "alter user #{snowpipe_user} set default_role = #{snowpipe_role}")
-query.(conn, "alter user testuser set default_role = #{test_role}")
 ```
 
 ### Erlang
 
 ```erlang
-Database = "testdatabase",
-Schema = "public",
-Table = "test1",
-Stage = "teststage0",
-Pipe = "testpipe0",
-Warehouse = "testwarehouse",
-SnowpipeRole = "snowpipe1",
-SnowpipeUser = "snowpipeuser",
-TestRole = "testrole",
-FqnTable = [Database, ".", Schema, ".", Table],
-FqnStage = [Database, ".", Schema, ".", Stage],
-FqnPipe = [Database, ".", Schema, ".", Pipe],
-
-Query(Conn, "use role accountadmin"),
+Database = "testdatabase".
+Schema = "public".
+Table = "test0".
+Stage = "teststage0".
+Pipe = "testpipe0".
+Warehouse = "testwarehouse".
+TestRole = "testrole".
+FqnTable = [Database, ".", Schema, ".", Table].
+FqnStage = [Database, ".", Schema, ".", Stage].
+FqnPipe = [Database, ".", Schema, ".", Pipe].
+
+Query(Conn, "use role accountadmin").
 
 % Create database, table, stage, pipe, warehouse
-Query(Conn, ["create database if not exists ", Database]),
-Query(Conn, ["create or replace table ", FqnTable, " (clientid string, topic string, payload binary, publish_received_at timestamp_ltz)"]),
-Query(Conn, ["create stage if not exists ", FqnStage, " file_format = (type = csv parse_header = true) copy_options = (on_error = continue purge = true)"]),
-Query(Conn, ["create pipe if not exists ", FqnPipe, " as copy into ", FqnTable, " from @", FqnStage, " match_by_column_name = case_insensitive"]),
-Query(Conn, ["create or replace warehouse ", Warehouse]),
+Query(Conn, ["create database if not exists ", Database]).
+Query(Conn, ["create or replace table ", FqnTable, " (clientid string, topic string, payload binary, publish_received_at timestamp_ltz)"]).
+Query(Conn, ["create stage if not exists ", FqnStage, " file_format = (type = csv parse_header = true) copy_options = (on_error = continue purge = true)"]).
+Query(Conn, ["create pipe if not exists ", FqnPipe, " as copy into ", FqnTable, " from @", FqnStage, " match_by_column_name = case_insensitive"]).
+Query(Conn, ["create or replace warehouse ", Warehouse]).
+
+% Read public key contents
+{ok, Bin} = file:read_file("snowflake_rsa_key.public.pem").
+Pem = binary_to_list(Bin).
+[_ | Lines] = string:split(string:trim(Pem), "\n", all).
+PublicPemContentsTrimmed = lists:join("\n", lists:droplast(Lines)).
+
+% Create user account for running local tests
+TestUser = "testuser".
+Query(Conn, ["create user ", TestUser, " password = 'TestUser99' must_change_password = false rsa_public_key = '", PublicPemContentsTrimmed, "'"]).
+% {updated,undefined}
 
 % Create a role for the Snowpipe privileges.
-Query(Conn, ["create or replace role ", SnowpipeRole]),
-Query(Conn, ["create or replace role ", TestRole]),
+Query(Conn, ["create or replace role ", TestRole]).
 
 % Grant the USAGE privilege on the database and schema that contain the pipe object.
-Query(Conn, ["grant usage on database ", Database, " to role ", SnowpipeRole]),
-Query(Conn, ["grant usage on database ", Database, " to role ", TestRole]),
-Query(Conn, ["grant usage on schema ", Database, ".", Schema, " to role ", SnowpipeRole]),
-Query(Conn, ["grant usage on schema ", Database, ".", Schema, " to role ", TestRole]),
+Query(Conn, ["grant usage on database ", Database, " to role ", TestRole]).
+Query(Conn, ["grant usage on schema ", Database, ".", Schema, " to role ", TestRole]).
+% Grant the USAGE privilege on the warehouse (only needed for test account)
+Query(Conn, ["grant usage on warehouse ", Warehouse, " to role ", TestRole]).
 
-% Grant the INSERT and SELECT privileges on the target table.
-Query(Conn, ["grant insert, select on ", FqnTable, " to role ", SnowpipeRole]),
-% For cleaning up table after tests
-Query(Conn, ["grant insert, select, truncate, delete on ", FqnTable, " to role ", TestRole]),
+% Grant the INSERT, SELECT, TRUNCATE and DELETE privileges on the target table
+% for cleaning up after tests
+Query(Conn, ["grant insert, select, truncate, delete on ", FqnTable, " to role ", TestRole]).
 
-% Grant the USAGE privilege on the external stage.
-% Must use read/write for internal stage
-% Query(Conn, ["grant usage on stage ", FqnStage, " to role ", SnowpipeRole]),
-Query(Conn, ["grant read, write on stage ", FqnStage, " to role ", SnowpipeRole]),
-% For cleaning up table after tests
-Query(Conn, ["grant read, write on stage ", FqnStage, " to role ", TestRole]),
+% Grant the READ and WRITE privilege on the internal stage
+Query(Conn, ["grant read, write on stage ", FqnStage, " to role ", TestRole]).
 
 % Grant the OPERATE and MONITOR privileges on the pipe object.
-Query(Conn, ["grant operate, monitor on pipe ", FqnPipe, " to role ", SnowpipeRole]),
+Query(Conn, ["grant operate, monitor on pipe ", FqnPipe, " to role ", TestRole]).
 
 % Grant the role to a user
-Query(Conn, ["create user if not exists ", SnowpipeUser, " password = 'TestUser99' must_change_password = false rsa_public_key = '", PublicPemContentsTrimmed, "'"]),
+Query(Conn, ["grant role ", TestRole, " to user ", TestUser]).
+
+% Set the role as the default role for the user
+Query(Conn, ["alter user ", TestUser, " set default_role = ", TestRole]).
 
-Query(Conn, ["grant usage on warehouse ", Warehouse, " to role ", TestRole]),
+% Create user account for connector and action
+SnowpipeRole = "snowpipe".
+SnowpipeUser = "snowpipeuser".
 
-Query(Conn, ["grant role ", SnowpipeRole, " to user ", SnowpipeUser]),
-Query(Conn, ["grant role ", SnowpipeRole, " to user ", TestUser]),
-Query(Conn, ["grant role ", TestRole, " to user ", TestUser]),
+Query(Conn, ["create user if not exists ", SnowpipeUser, " password = 'TestUser99' must_change_password = false rsa_public_key = '", PublicPemContentsTrimmed, "'"]).
+
+% Create a role for the Snowpipe privileges.
+Query(Conn, ["create or replace role ", SnowpipeRole]).
+
+% Grant the USAGE privilege on the database and schema that contain the pipe object.
+Query(Conn, ["grant usage on database ", Database, " to role ", SnowpipeRole]).
+Query(Conn, ["grant usage on schema ", Database, ".", Schema, " to role ", SnowpipeRole]).
+
+% Grant the INSERT and SELECT privileges on the target table.
+Query(Conn, ["grant insert, select on ", FqnTable, " to role ", SnowpipeRole]).
+
+% Grant the READ and WRITE privilege on the internal stage.
+Query(Conn, ["grant read, write on stage ", FqnStage, " to role ", SnowpipeRole]).
+
+% Grant the OPERATE and MONITOR privileges on the pipe object.
+Query(Conn, ["grant operate, monitor on pipe ", FqnPipe, " to role ", SnowpipeRole]).
+
+% Grant the role to a user
+Query(Conn, ["grant role ", SnowpipeRole, " to user ", SnowpipeUser]).
 
 % Set the role as the default role for the user
-Query(Conn, ["alter user ", SnowpipeUser, " set default_role = ", SnowpipeRole]),
-Query(Conn, ["alter user testuser set default_role = ", TestRole]).
+Query(Conn, ["alter user ", SnowpipeUser, " set default_role = ", SnowpipeRole]).
 ```

+ 117 - 0
apps/emqx_bridge_snowflake/docs/user-guide.md

@@ -0,0 +1,117 @@
+## Initialize Snowflake ODBC driver
+
+### Linux
+
+Run `scripts/install-snowflake-driver.sh` to install the Snowflake ODBC driver and configure `odbc.ini`.
+
+### macOS
+
+- Install unixODBC (e.g. `brew install unixodbc`)
+- [Download and install iODBC](https://github.com/openlink/iODBC/releases/download/v3.52.16/iODBC-SDK-3.52.16-macOS11.dmg)
+- [Download and install the Snowflake ODBC driver](https://sfc-repo.snowflakecomputing.com/odbc/macuniversal/3.3.2/snowflake_odbc_mac_64universal-3.3.2.dmg)
+- Refer to [Installing and configuring the ODBC Driver for macOS](https://docs.snowflake.com/en/developer-guide/odbc/odbc-mac) for more information.
+- Update `~/.odbc.ini` and `/opt/snowflake/snowflakeodbc/lib/universal/simba.snowflake.ini`:
+
+```sh
+chown $(id -u):$(id -g) /opt/snowflake/snowflakeodbc/lib/universal/simba.snowflake.ini
+echo 'ODBCInstLib=libiodbcinst.dylib' >> /opt/snowflake/snowflakeodbc/lib/universal/simba.snowflake.ini
+
+cat << EOF > ~/.odbc.ini
+[ODBC]
+Trace=no
+TraceFile=
+
+[ODBC Drivers]
+Snowflake = Installed
+
+[ODBC Data Sources]
+snowflake = Snowflake
+
+[Snowflake]
+Driver = /opt/snowflake/snowflakeodbc/lib/universal/libSnowflake.dylib
+EOF
+```
+
+## Create user account and database
+
+After completing the steps below, the following credentials shall be used in the connector and action:
+
+| Field                  | Value                                            |
+|------------------------|--------------------------------------------------|
+| Data Source Name(DSN)  | `snowflake`                                      |
+| Username               | `snowpipeuser`                                   |
+| Password               | `Snowpipeuser99`                                 |
+| Database Name          | `testdatabase`                                   |
+| Schema                 | `public`                                         |
+| Stage                  | `emqx`                                           |
+| Pipe                   | `emqx`                                           |
+| Pipe User              | `snowpipeuser`                                   |
+| Private Key            | `file://<path to snowflake_rsa_key.private.pem>` |
+
+### Generate RSA key pair
+
+```sh
+openssl genrsa 2048 | openssl pkcs8 -topk8 -inform PEM -out snowflake_rsa_key.private.pem -nocrypt
+openssl rsa -in snowflake_rsa_key.private.pem -pubout -out snowflake_rsa_key.public.pem
+```
+
+### Snowflake SQL Worksheet (+ Create --> SQL Worksheet)
+
+```sql
+USE ROLE accountadmin;
+
+CREATE DATABASE IF NOT EXISTS testdatabase;
+
+CREATE OR REPLACE TABLE testdatabase.public.emqx (
+    clientid STRING,
+    topic STRING,
+    payload STRING,
+    publish_received_at TIMESTAMP_LTZ
+);
+
+CREATE STAGE IF NOT EXISTS testdatabase.public.emqx
+FILE_FORMAT = (TYPE = CSV PARSE_HEADER = TRUE FIELD_OPTIONALLY_ENCLOSED_BY = '"')
+COPY_OPTIONS = (ON_ERROR = CONTINUE PURGE = TRUE);
+
+CREATE PIPE IF NOT EXISTS testdatabase.public.emqx AS
+COPY INTO testdatabase.public.emqx
+FROM @testdatabase.public.emqx
+MATCH_BY_COLUMN_NAME = CASE_INSENSITIVE;
+
+CREATE USER IF NOT EXISTS snowpipeuser
+    PASSWORD = 'Snowpipeuser99'
+    MUST_CHANGE_PASSWORD = FALSE;
+
+-- Set the RSA public key for 'snowpipeuser'
+-- Note: Remove the '-----BEGIN PUBLIC KEY-----' and '-----END PUBLIC KEY-----' lines from your PEM file,
+-- and include the remaining content below, preserving line breaks.
+
+ALTER USER snowpipeuser SET RSA_PUBLIC_KEY = '
+<YOUR_PUBLIC_KEY_CONTENTS_LINE_1>
+<YOUR_PUBLIC_KEY_CONTENTS_LINE_2>
+<YOUR_PUBLIC_KEY_CONTENTS_LINE_3>
+<YOUR_PUBLIC_KEY_CONTENTS_LINE_4>
+';
+
+CREATE OR REPLACE ROLE snowpipe;
+
+GRANT USAGE ON DATABASE testdatabase TO ROLE snowpipe;
+GRANT USAGE ON SCHEMA testdatabase.public TO ROLE snowpipe;
+GRANT INSERT, SELECT ON testdatabase.public.emqx TO ROLE snowpipe;
+GRANT READ, WRITE ON STAGE testdatabase.public.emqx TO ROLE snowpipe;
+GRANT OPERATE, MONITOR ON PIPE testdatabase.public.emqx TO ROLE snowpipe;
+GRANT ROLE snowpipe TO USER snowpipeuser;
+ALTER USER snowpipeuser SET DEFAULT_ROLE = snowpipe;
+```
+
+## Rule SQL
+
+```
+SELECT
+  clientid,
+  unix_ts_to_rfc3339(publish_received_at, 'millisecond') as publish_received_at,
+  topic,
+  payload
+FROM
+  "t/#"
+```

+ 13 - 1
apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_action_schema.erl

@@ -102,6 +102,11 @@ fields(aggreg_parameters) ->
                     importance => ?IMPORTANCE_HIDDEN,
                     required => true
                 }
+            )},
+        {proxy,
+            mk(
+                hoconsc:union([none, ref(proxy_config)]),
+                #{default => none, desc => ?DESC("proxy_config")}
             )}
     ];
 fields(direct_parameters) ->
@@ -129,6 +134,12 @@ fields(aggregation) ->
                 }
             )}
     ];
+fields(proxy_config) ->
+    [
+        {host, mk(binary(), #{required => true, desc => ?DESC("proxy_config_host")})},
+        {port,
+            mk(emqx_schema:port_number(), #{required => true, desc => ?DESC("proxy_config_port")})}
+    ];
 fields(action_resource_opts) ->
     %% NOTE: This action should benefit from generous batching defaults.
     emqx_bridge_v2_schema:action_resource_opts_fields([
@@ -140,7 +151,8 @@ desc(Name) when
     Name =:= ?ACTION_TYPE;
     Name =:= aggreg_parameters;
     Name =:= aggregation;
-    Name =:= parameters
+    Name =:= parameters;
+    Name =:= proxy_config
 ->
     ?DESC(Name);
 desc(action_resource_opts) ->

+ 47 - 16
apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_connector.erl

@@ -75,7 +75,8 @@
     username := binary(),
     password := emqx_schema_secret:secret(),
     dsn := binary(),
-    pool_size := pos_integer()
+    pool_size := pos_integer(),
+    proxy := none | proxy_config()
 }.
 -type connector_state() :: #{
     account := account(),
@@ -106,6 +107,11 @@
 -type stage() :: binary().
 -type pipe() :: binary().
 
+-type proxy_config() :: #{
+    host := binary(),
+    port := emqx_schema:port_number()
+}.
+
 -type odbc_pool() :: connector_resource_id().
 -type http_pool() :: action_resource_id().
 -type http_client_config() :: #{
@@ -192,7 +198,8 @@ on_start(ConnResId, ConnConfig) ->
         username := Username,
         password := Password,
         dsn := DSN,
-        pool_size := PoolSize
+        pool_size := PoolSize,
+        proxy := ProxyConfig
     } = ConnConfig,
     #{hostname := Host, port := Port} = emqx_schema:parse_server(Server, ?SERVER_OPTS),
     PoolOpts = [
@@ -202,6 +209,7 @@ on_start(ConnResId, ConnConfig) ->
         {server, Server},
         {username, Username},
         {password, Password},
+        {proxy, ProxyConfig},
         {on_disconnect, {?MODULE, disconnect, []}}
     ],
     case emqx_resource_pool:start(ConnResId, ?MODULE, PoolOpts) of
@@ -212,7 +220,7 @@ on_start(ConnResId, ConnConfig) ->
                 installed_actions => #{}
             },
             {ok, State};
-        {error, Reason} ->
+        {error, {start_pool_failed, _, Reason}} ->
             {error, Reason}
     end.
 
@@ -643,12 +651,14 @@ start_http_pool(ActionResId, ActionConfig, ConnState) ->
             connect_timeout := ConnectTimeout,
             pipelining := Pipelining,
             pool_size := PoolSize,
-            max_retries := MaxRetries
+            max_retries := MaxRetries,
+            proxy := ProxyConfig0
         },
         resource_opts := #{request_ttl := RequestTTL}
     } = ActionConfig,
     PipeParts = lists:map(fun maybe_quote/1, [Database, Schema, Pipe]),
-    PipePath = iolist_to_binary(lists:join($., PipeParts)),
+    PipePath0 = iolist_to_binary(lists:join($., PipeParts)),
+    PipePath = uri_string:quote(PipePath0),
     PipePrefix = iolist_to_binary([
         <<"https://">>,
         Host,
@@ -667,17 +677,31 @@ start_http_pool(ActionResId, ActionConfig, ConnState) ->
     ]),
     JWTConfig = jwt_config(ActionResId, ActionConfig, ConnState),
     TransportOpts = emqx_tls_lib:to_client_opts(#{enable => true, verify => verify_none}),
-    PoolOpts = [
-        {host, Host},
-        {port, Port},
-        {connect_timeout, ConnectTimeout},
-        {keepalive, 30_000},
-        {pool_type, random},
-        {pool_size, PoolSize},
-        {transport, tls},
-        {transport_opts, TransportOpts},
-        {enable_pipelining, Pipelining}
-    ],
+    ProxyConfig =
+        case ProxyConfig0 of
+            none ->
+                [];
+            #{host := ProxyHost, port := ProxyPort} ->
+                [
+                    {proxy, #{
+                        host => str(ProxyHost),
+                        port => ProxyPort
+                    }}
+                ]
+        end,
+    PoolOpts =
+        ProxyConfig ++
+            [
+                {host, Host},
+                {port, Port},
+                {connect_timeout, ConnectTimeout},
+                {keepalive, 30_000},
+                {pool_type, random},
+                {pool_size, PoolSize},
+                {transport, tls},
+                {transport_opts, TransportOpts},
+                {enable_pipelining, Pipelining}
+            ],
     case ehttpc_sup:start_pool(ActionResId, PoolOpts) of
         {ok, _} ->
             {ok, #{
@@ -690,6 +714,9 @@ start_http_pool(ActionResId, ActionConfig, ConnState) ->
                     request_ttl => RequestTTL
                 }
             }};
+        {error, {already_started, _}} ->
+            _ = ehttpc_sup:stop_pool(ActionResId),
+            start_http_pool(ActionResId, ActionConfig, ConnState);
         {error, Reason} ->
             {error, Reason}
     end.
@@ -802,6 +829,10 @@ conn_str([{username, Username} | Opts], Acc) ->
     conn_str(Opts, ["uid=" ++ str(Username) | Acc]);
 conn_str([{password, Password} | Opts], Acc) ->
     conn_str(Opts, ["pwd=" ++ str(emqx_secret:unwrap(Password)) | Acc]);
+conn_str([{proxy, none} | Opts], Acc) ->
+    conn_str(Opts, Acc);
+conn_str([{proxy, #{host := Host, port := Port}} | Opts], Acc) ->
+    conn_str(Opts, ["proxy=" ++ str(Host) ++ ":" ++ str(Port) | Acc]);
 conn_str([{_, _} | Opts], Acc) ->
     conn_str(Opts, Acc).
 

+ 15 - 2
apps/emqx_bridge_snowflake/src/emqx_bridge_snowflake_connector_schema.erl

@@ -74,16 +74,29 @@ fields(connector_config) ->
                 desc => ?DESC("account"),
                 validator => fun account_id_validator/1
             })},
-        {dsn, mk(binary(), #{required => true, desc => ?DESC("dsn")})}
+        {dsn, mk(binary(), #{required => true, desc => ?DESC("dsn")})},
+        {proxy,
+            mk(
+                hoconsc:union([none, hoconsc:ref(?MODULE, proxy_config)]),
+                #{default => none, desc => ?DESC("proxy_config")}
+            )}
         | Fields
     ] ++
         emqx_connector_schema:resource_opts() ++
-        emqx_connector_schema_lib:ssl_fields().
+        emqx_connector_schema_lib:ssl_fields();
+fields(proxy_config) ->
+    [
+        {host, mk(binary(), #{required => true, desc => ?DESC("proxy_config_host")})},
+        {port,
+            mk(emqx_schema:port_number(), #{required => true, desc => ?DESC("proxy_config_port")})}
+    ].
 
 desc("config_connector") ->
     ?DESC("config_connector");
 desc(resource_opts) ->
     ?DESC(emqx_resource_schema, resource_opts);
+desc(proxy_config) ->
+    ?DESC("proxy_config");
 desc(_Name) ->
     undefined.
 

+ 1 - 1
apps/emqx_bridge_snowflake/test/emqx_bridge_snowflake_SUITE.erl

@@ -29,7 +29,7 @@
 -define(DATABASE, <<"testdatabase">>).
 -define(SCHEMA, <<"public">>).
 -define(STAGE, <<"teststage0">>).
--define(TABLE, <<"test1">>).
+-define(TABLE, <<"test0">>).
 -define(WAREHOUSE, <<"testwarehouse">>).
 -define(PIPE_USER, <<"snowpipeuser">>).
 

+ 1 - 1
apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src

@@ -1,6 +1,6 @@
 {application, emqx_bridge_sqlserver, [
     {description, "EMQX Enterprise SQL Server Bridge"},
-    {vsn, "0.2.4"},
+    {vsn, "0.2.5"},
     {registered, []},
     {applications, [kernel, stdlib, emqx_resource, odbc]},
     {env, [

+ 3 - 5
apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver_connector.erl

@@ -371,9 +371,7 @@ do_get_status(Conn) ->
 %% 'https://learn.microsoft.com/en-us/sql/connect/odbc/
 %%      dsn-connection-string-attribute?source=recommendations&view=sql-server-ver16#encrypt'
 conn_str([], Acc) ->
-    %% we should use this for msodbcsql 18+
-    %% lists:join(";", ["Encrypt=YES", "TrustServerCertificate=YES" | Acc]);
-    lists:join(";", Acc);
+    lists:join(";", ["Encrypt=YES", "TrustServerCertificate=YES" | Acc]);
 conn_str([{driver, Driver} | Opts], Acc) ->
     conn_str(Opts, ["Driver=" ++ str(Driver) | Acc]);
 conn_str([{server, Server} | Opts], Acc) ->
@@ -512,8 +510,8 @@ get_query_tuple([{_ChannelId, {_QueryType, _Data}} | _]) ->
         {unrecoverable_error,
             {invalid_request, <<"The only query type that supports batching is insert.">>}}
     );
-get_query_tuple([InsertQuery | _]) ->
-    get_query_tuple(InsertQuery).
+get_query_tuple([_InsertQuery | _] = Reqs) ->
+    lists:map(fun get_query_tuple/1, Reqs).
 
 %% for bridge data to sql server
 parse_sql_template(Config) ->

+ 2 - 6
apps/emqx_bridge_sqlserver/test/emqx_bridge_sqlserver_SUITE.erl

@@ -354,7 +354,7 @@ t_simple_query(Config) ->
     ok.
 
 -define(MISSING_TINYINT_ERROR,
-    "[Microsoft][ODBC Driver 17 for SQL Server][SQL Server]"
+    "[Microsoft][ODBC Driver 18 for SQL Server][SQL Server]"
     "Conversion failed when converting the varchar value 'undefined' to data type tinyint. SQLSTATE IS: 22018"
 ).
 
@@ -654,11 +654,7 @@ batch_size(Config) ->
     end.
 
 conn_str([], Acc) ->
-    %% TODO: for msodbc 18+, we need to add "Encrypt=YES;TrustServerCertificate=YES"
-    %% but havn't tested now
-    %% we should use this for msodbcsql 18+
-    %% lists:join(";", ["Encrypt=YES", "TrustServerCertificate=YES" | Acc]);
-    lists:join(";", Acc);
+    lists:join(";", ["Encrypt=YES", "TrustServerCertificate=YES" | Acc]);
 conn_str([{driver, Driver} | Opts], Acc) ->
     conn_str(Opts, ["Driver=" ++ str(Driver) | Acc]);
 conn_str([{host, Host} | Opts], Acc) ->

+ 46 - 2
apps/emqx_cluster_link/src/emqx_cluster_link_api.erl

@@ -23,7 +23,8 @@
 -export([
     '/cluster/links'/2,
     '/cluster/links/link/:name'/2,
-    '/cluster/links/link/:name/metrics'/2
+    '/cluster/links/link/:name/metrics'/2,
+    '/cluster/links/link/:name/metrics/reset'/2
 ]).
 
 -define(CONF_PATH, [cluster, links]).
@@ -40,7 +41,8 @@ paths() ->
     [
         "/cluster/links",
         "/cluster/links/link/:name",
-        "/cluster/links/link/:name/metrics"
+        "/cluster/links/link/:name/metrics",
+        "/cluster/links/link/:name/metrics/reset"
     ].
 
 schema("/cluster/links") ->
@@ -133,6 +135,23 @@ schema("/cluster/links/link/:name/metrics") ->
                         )
                     }
             }
+    };
+schema("/cluster/links/link/:name/metrics/reset") ->
+    #{
+        'operationId' => '/cluster/links/link/:name/metrics/reset',
+        put =>
+            #{
+                description => "Reset a cluster link's metrics",
+                tags => ?TAGS,
+                parameters => [param_path_name()],
+                responses =>
+                    #{
+                        204 => <<"Reset">>,
+                        404 => emqx_dashboard_swagger:error_codes(
+                            [?NOT_FOUND], <<"Cluster link not found">>
+                        )
+                    }
+            }
     }.
 
 fields(link_config_response) ->
@@ -195,6 +214,9 @@ fields(node_metrics) ->
 '/cluster/links/link/:name/metrics'(get, #{bindings := #{name := Name}}) ->
     with_link(Name, fun() -> handle_metrics(Name) end, not_found()).
 
+'/cluster/links/link/:name/metrics/reset'(put, #{bindings := #{name := Name}}) ->
+    with_link(Name, fun() -> handle_reset_metrics(Name) end, not_found()).
+
 %%--------------------------------------------------------------------
 %% Internal funcs
 %%--------------------------------------------------------------------
@@ -351,6 +373,28 @@ format_metrics(Node, RouterMetrics, ResourceMetrics) ->
         }
     }.
 
+handle_reset_metrics(Name) ->
+    Res = emqx_cluster_link_metrics:reset_metrics(Name),
+    ErrorNodes =
+        lists:filtermap(
+            fun
+                ({_Node, {ok, ok}, {ok, ok}}) ->
+                    false;
+                ({Node, _, _}) ->
+                    {true, Node}
+            end,
+            Res
+        ),
+    case ErrorNodes of
+        [] ->
+            ?NO_CONTENT;
+        [_ | _] ->
+            Msg0 = <<"Metrics reset failed on one or more nodes. Please try again.">>,
+            Msg1 = ?ERROR_MSG('INTERNAL_ERROR', Msg0),
+            Msg = Msg1#{nodes => ErrorNodes},
+            {500, Msg}
+    end.
+
 add_status(Name, Link) ->
     NodeRPCResults = emqx_cluster_link_mqtt:get_resource_cluster(Name),
     Status = collect_single_status(NodeRPCResults),

+ 11 - 0
apps/emqx_cluster_link/src/emqx_cluster_link_metrics.erl

@@ -11,6 +11,7 @@
     drop_metrics/1,
 
     get_metrics/1,
+    reset_metrics/1,
     routes_set/2
 ]).
 
@@ -47,6 +48,16 @@ maybe_create_metrics(ClusterName) ->
             )
     end.
 
+reset_metrics(ClusterName) ->
+    Nodes = emqx:running_nodes(),
+    Timeout = 15_000,
+    RouterResults = emqx_metrics_proto_v2:reset_metrics(Nodes, ?METRIC_NAME, ClusterName, Timeout),
+    ResourceId = emqx_cluster_link_mqtt:resource_id(ClusterName),
+    ResourceResults = emqx_metrics_proto_v2:reset_metrics(
+        Nodes, resource_metrics, ResourceId, Timeout
+    ),
+    lists:zip3(Nodes, RouterResults, ResourceResults).
+
 drop_metrics(ClusterName) ->
     ok = emqx_metrics_worker:clear_metrics(?METRIC_NAME, ClusterName).
 

+ 33 - 0
apps/emqx_cluster_link/test/emqx_cluster_link_api_SUITE.erl

@@ -181,6 +181,11 @@ get_metrics(SourceOrTargetCluster, Name) ->
     Path = emqx_mgmt_api_test_util:api_path(Host, [api_root(), "link", Name, "metrics"]),
     emqx_mgmt_api_test_util:simple_request(get, Path, _Params = []).
 
+reset_metrics(SourceOrTargetCluster, Name) ->
+    Host = host(SourceOrTargetCluster),
+    Path = emqx_mgmt_api_test_util:api_path(Host, [api_root(), "link", Name, "metrics", "reset"]),
+    emqx_mgmt_api_test_util:simple_request(put, Path, _Params = []).
+
 host(source) -> "http://127.0.0.1:18083";
 host(target) -> "http://127.0.0.1:28083".
 
@@ -755,6 +760,34 @@ t_metrics(Config) ->
         )
     ),
 
+    %% Reset metrics
+    ?assertMatch({204, _}, reset_metrics(source, SourceName)),
+    ?assertMatch(
+        {200, #{
+            <<"metrics">> := #{
+                <<"router">> := #{<<"routes">> := 0},
+                <<"forwarding">> := #{<<"matched">> := 0}
+            },
+            <<"node_metrics">> := [
+                #{
+                    <<"metrics">> :=
+                        #{
+                            <<"router">> := #{<<"routes">> := 0},
+                            <<"forwarding">> := #{<<"matched">> := 0}
+                        }
+                },
+                #{
+                    <<"metrics">> :=
+                        #{
+                            <<"router">> := #{<<"routes">> := 0},
+                            <<"forwarding">> := #{<<"matched">> := 0}
+                        }
+                }
+            ]
+        }},
+        get_metrics(source, SourceName)
+    ),
+
     ok.
 
 %% Checks that we can update a link via the API in the same fashion as the frontend does,

+ 1 - 1
apps/emqx_conf/src/emqx_conf.app.src

@@ -1,6 +1,6 @@
 {application, emqx_conf, [
     {description, "EMQX configuration management"},
-    {vsn, "0.3.1"},
+    {vsn, "0.4.0"},
     {registered, []},
     {mod, {emqx_conf_app, []}},
     {applications, [kernel, stdlib]},

+ 17 - 2
apps/emqx_conf/src/emqx_conf_schema.erl

@@ -590,14 +590,15 @@ fields("node") ->
             )},
         {"role",
             sc(
-                hoconsc:enum([core] ++ emqx_schema_hooks:injection_point('node.role')),
+                hoconsc:enum(node_role_symbols()),
                 #{
                     mapping => "mria.node_role",
                     default => core,
                     'readOnly' => true,
                     importance => ?IMPORTANCE_HIGH,
                     aliases => [db_role],
-                    desc => ?DESC(db_role)
+                    desc => ?DESC(db_role),
+                    validator => fun validate_node_role/1
                 }
             )},
         {"rpc_module",
@@ -1574,3 +1575,17 @@ is_ip_addr(Host, Type) ->
 
 address_type(IP) when tuple_size(IP) =:= 4 -> ipv4;
 address_type(IP) when tuple_size(IP) =:= 8 -> ipv6.
+
+node_role_symbols() ->
+    [core] ++ emqx_schema_hooks:injection_point('node.role').
+
+validate_node_role(Role) ->
+    Allowed = node_role_symbols(),
+    case lists:member(Role, Allowed) of
+        true ->
+            ok;
+        false when Role =:= replicant ->
+            throw("Node role 'replicant' is only allowed in Enterprise edition since 5.8.0");
+        false ->
+            throw("Invalid node role: " ++ atom_to_list(Role))
+    end.

+ 2 - 1
apps/emqx_conf/src/emqx_conf_schema_inject.erl

@@ -65,7 +65,8 @@ authn_mods(ee) ->
         [
             emqx_gcp_device_authn_schema,
             emqx_authn_scram_restapi_schema,
-            emqx_authn_kerberos_schema
+            emqx_authn_kerberos_schema,
+            emqx_authn_cinfo_schema
         ].
 
 authz() ->

+ 29 - 0
apps/emqx_conf/test/emqx_conf_schema_tests.erl

@@ -685,3 +685,32 @@ dns_srv_record_is_ok_test() ->
         Value when is_map(Value),
         hocon_tconf:check_plain(emqx_conf_schema, ConfMap, #{required => false}, [node, cluster])
     ).
+
+invalid_role_test() ->
+    Conf = node_role_conf(dummy),
+    ?assertThrow(
+        {emqx_conf_schema, [#{reason := "Invalid node role: dummy"}]},
+        hocon_tconf:check_plain(emqx_conf_schema, Conf, #{required => false}, [node])
+    ).
+
+unsupported_role_test() ->
+    test_unsupported_role(emqx_release:edition()).
+
+test_unsupported_role(ee) ->
+    %% all roles are supported in ee
+    ok;
+test_unsupported_role(ce) ->
+    %% replicant role is not allowed for ce since 5.8.0
+    Conf = node_role_conf(replicant),
+    ?assertThrow(
+        {emqx_conf_schema, [
+            #{reason := "Node role 'replicant' is only allowed in Enterprise edition since 5.8.0"}
+        ]},
+        hocon_tconf:check_plain(emqx_conf_schema, Conf, #{required => false}, [node])
+    ).
+
+node_role_conf(Role0) ->
+    Role = atom_to_binary(Role0),
+    Hocon = <<"node { role =", Role/binary, ", cookie = \"cookie\", data_dir = \".\" }">>,
+    {ok, ConfMap} = hocon:binary(Hocon, #{format => map}),
+    ConfMap.

+ 1 - 1
apps/emqx_connector/mix.exs

@@ -36,7 +36,7 @@ defmodule EMQXConnector.MixProject do
       {:emqx_connector_jwt, in_umbrella: true},
       UMP.common_dep(:jose),
       UMP.common_dep(:ecpool),
-      {:eredis_cluster, github: "emqx/eredis_cluster", tag: "0.8.4"},
+      {:eredis_cluster, github: "emqx/eredis_cluster", tag: "0.8.5"},
       UMP.common_dep(:ehttpc),
       UMP.common_dep(:emqtt),
     ]

+ 4 - 0
apps/emqx_connector/src/emqx_connector.erl

@@ -32,6 +32,7 @@
     get_metrics/2,
     list/0,
     load/0,
+    is_exist/2,
     lookup/1,
     lookup/2,
     remove/2,
@@ -235,6 +236,9 @@ lookup(Type, Name, RawConf) ->
             }}
     end.
 
+is_exist(Type, Name) ->
+    emqx_resource:is_exist(emqx_connector_resource:resource_id(Type, Name)).
+
 get_metrics(Type, Name) ->
     emqx_resource:get_metrics(emqx_connector_resource:resource_id(Type, Name)).
 

+ 12 - 12
apps/emqx_connector/src/emqx_connector_api.erl

@@ -318,10 +318,10 @@ schema("/connectors_probe") ->
     }.
 
 '/connectors'(post, #{body := #{<<"type">> := ConnectorType, <<"name">> := ConnectorName} = Conf0}) ->
-    case emqx_connector:lookup(ConnectorType, ConnectorName) of
-        {ok, _} ->
+    case emqx_connector:is_exist(ConnectorType, ConnectorName) of
+        true ->
             ?BAD_REQUEST('ALREADY_EXISTS', <<"connector already exists">>);
-        {error, not_found} ->
+        false ->
             Conf = filter_out_request_body(Conf0),
             create_connector(ConnectorType, ConnectorName, Conf)
     end;
@@ -345,20 +345,20 @@ schema("/connectors_probe") ->
     Conf1 = filter_out_request_body(Conf0),
     ?TRY_PARSE_ID(
         Id,
-        case emqx_connector:lookup(ConnectorType, ConnectorName) of
-            {ok, _} ->
+        case emqx_connector:is_exist(ConnectorType, ConnectorName) of
+            true ->
                 RawConf = emqx:get_raw_config([connectors, ConnectorType, ConnectorName], #{}),
                 Conf = emqx_utils:deobfuscate(Conf1, RawConf),
                 update_connector(ConnectorType, ConnectorName, Conf);
-            {error, not_found} ->
+            false ->
                 ?CONNECTOR_NOT_FOUND(ConnectorType, ConnectorName)
         end
     );
 '/connectors/:id'(delete, #{bindings := #{id := Id}}) ->
     ?TRY_PARSE_ID(
         Id,
-        case emqx_connector:lookup(ConnectorType, ConnectorName) of
-            {ok, _} ->
+        case emqx_connector:is_exist(ConnectorType, ConnectorName) of
+            true ->
                 case emqx_connector:remove(ConnectorType, ConnectorName) of
                     ok ->
                         ?NO_CONTENT;
@@ -372,7 +372,7 @@ schema("/connectors_probe") ->
                     {error, Reason} ->
                         ?INTERNAL_ERROR(Reason)
                 end;
-            {error, not_found} ->
+            false ->
                 ?CONNECTOR_NOT_FOUND(ConnectorType, ConnectorName)
         end
     ).
@@ -406,11 +406,11 @@ schema("/connectors_probe") ->
 maybe_deobfuscate_connector_probe(
     #{<<"type">> := ConnectorType, <<"name">> := ConnectorName} = Params
 ) ->
-    case emqx_connector:lookup(ConnectorType, ConnectorName) of
-        {ok, _} ->
+    case emqx_connector:is_exist(ConnectorType, ConnectorName) of
+        true ->
             RawConf = emqx:get_raw_config([connectors, ConnectorType, ConnectorName], #{}),
             emqx_utils:deobfuscate(Params, RawConf);
-        _ ->
+        false ->
             %% A connector may be probed before it's created, so not finding it here is fine
             Params
     end;

+ 6 - 4
apps/emqx_connector/src/emqx_connector_resource.erl

@@ -52,6 +52,8 @@
 
 -export([parse_url/1]).
 
+-define(PROBE_ID_SEP, $_).
+
 -callback connector_config(ParsedConfig, Context) ->
     ParsedConfig
 when
@@ -90,7 +92,8 @@ parse_connector_id(ConnectorId) ->
     {atom(), atom() | binary()}.
 parse_connector_id(<<"connector:", ConnectorId/binary>>, Opts) ->
     parse_connector_id(ConnectorId, Opts);
-parse_connector_id(<<?TEST_ID_PREFIX, _:16/binary, ConnectorId/binary>>, Opts) ->
+parse_connector_id(?PROBE_ID_MATCH(Suffix), Opts) ->
+    <<?PROBE_ID_SEP, ConnectorId/binary>> = Suffix,
     parse_connector_id(ConnectorId, Opts);
 parse_connector_id(ConnectorId, Opts) ->
     emqx_resource:parse_resource_id(ConnectorId, Opts).
@@ -214,9 +217,8 @@ create_dry_run(Type, Conf0, Callback) ->
     TypeAtom = safe_atom(Type),
     %% We use a fixed name here to avoid creating an atom
     %% to avoid potential race condition, the resource id should be unique
-    Prefix = emqx_resource_manager:make_test_id(),
-    TmpName =
-        iolist_to_binary([Prefix, TypeBin, ":", <<"probedryrun">>]),
+    Prefix = ?PROBE_ID_NEW(),
+    TmpName = iolist_to_binary([Prefix, ?PROBE_ID_SEP, TypeBin, $:, "dryrun"]),
     TmpPath = emqx_utils:safe_filename(TmpName),
     Conf1 = maps:without([<<"name">>], Conf0),
     RawConf = #{<<"connectors">> => #{TypeBin => #{<<"temp_name">> => Conf1}}},

+ 2 - 0
apps/emqx_connector/test/emqx_connector_api_SUITE.erl

@@ -86,6 +86,8 @@
         ],
         <<"kafka_header_value_encode_mode">> => <<"none">>,
         <<"kafka_headers">> => <<"${pub_props}">>,
+        <<"max_linger_time">> => <<"1ms">>,
+        <<"max_linger_bytes">> => <<"1MB">>,
         <<"max_batch_bytes">> => <<"896KB">>,
         <<"max_inflight">> => 10,
         <<"message">> => #{

+ 244 - 121
apps/emqx_dashboard/src/emqx_dashboard_monitor.erl

@@ -20,6 +20,7 @@
 
 -include_lib("snabbkaffe/include/trace.hrl").
 -include_lib("emqx/include/logger.hrl").
+-include_lib("stdlib/include/ms_transform.hrl").
 
 -behaviour(gen_server).
 
@@ -38,26 +39,46 @@
 -export([
     samplers/0,
     samplers/2,
-    current_rate/1,
-    granularity_adapter/1
+    current_rate/1
 ]).
 
--ifdef(TEST).
--export([current_rate_cluster/0]).
--endif.
-
 %% for rpc
 -export([do_sample/2]).
 
+%% For tests
+-export([
+    current_rate_cluster/0,
+    sample_interval/1,
+    store/1,
+    format/1,
+    clean/1,
+    lookup/1,
+    sample_nodes/2,
+    randomize/2,
+    randomize/3,
+    sample_fill_gap/2,
+    fill_gaps/2
+]).
+
 -define(TAB, ?MODULE).
 
-%% 1 hour = 60 * 60 * 1000 milliseconds
--define(CLEAN_EXPIRED_INTERVAL, 60 * 60 * 1000).
-%% 7 days = 7 * 24 * 60 * 60 * 1000 milliseconds
--define(RETENTION_TIME, 7 * 24 * 60 * 60 * 1000).
+-define(ONE_SECOND, 1_000).
+-define(SECONDS, ?ONE_SECOND).
+-define(ONE_MINUTE, 60 * ?SECONDS).
+-define(MINUTES, ?ONE_MINUTE).
+-define(ONE_HOUR, 60 * ?MINUTES).
+-define(HOURS, ?ONE_HOUR).
+-define(ONE_DAY, 24 * ?HOURS).
+-define(DAYS, ?ONE_DAY).
+
+-define(CLEAN_EXPIRED_INTERVAL, 10 * ?MINUTES).
+-define(RETENTION_TIME, 7 * ?DAYS).
+-define(MAX_POSSIBLE_SAMPLES, 1440).
 
 -record(state, {
-    last
+    last,
+    clean_timer,
+    extra = []
 }).
 
 -record(emqx_monit, {
@@ -79,41 +100,20 @@ create_tables() ->
 %% API
 
 samplers() ->
-    format(do_sample(all, infinity)).
+    format(sample_fill_gap(all, 0)).
 
 samplers(NodeOrCluster, Latest) ->
-    Time = latest2time(Latest),
-    case format(do_sample(NodeOrCluster, Time)) of
+    SinceTime = latest2time(Latest),
+    case format(sample_fill_gap(NodeOrCluster, SinceTime)) of
         {badrpc, Reason} ->
             {badrpc, Reason};
         List when is_list(List) ->
-            granularity_adapter(List)
+            List
     end.
 
-latest2time(infinity) -> infinity;
+latest2time(infinity) -> 0;
 latest2time(Latest) -> erlang:system_time(millisecond) - (Latest * 1000).
 
-%% When the number of samples exceeds 1000, it affects the rendering speed of dashboard UI.
-%% granularity_adapter is an oversampling of the samples.
-%% Use more granular data and reduce data density.
-%%
-%% [
-%%   Data1 = #{time => T1, k1 => 1, k2 => 2},
-%%   Data2 = #{time => T2, k1 => 3, k2 => 4},
-%%   ...
-%% ]
-%% After granularity_adapter, Merge Data1 Data2
-%%
-%% [
-%%   #{time => T2, k1 => 1 + 3, k2 =>  2 + 6},
-%%   ...
-%% ]
-%%
-granularity_adapter(List) when length(List) > 1000 ->
-    granularity_adapter(List, []);
-granularity_adapter(List) ->
-    List.
-
 current_rate(all) ->
     current_rate_cluster();
 current_rate(Node) when Node == node() ->
@@ -167,8 +167,9 @@ start_link() ->
 
 init([]) ->
     sample_timer(),
-    clean_timer(),
-    {ok, #state{last = undefined}}.
+    %% clean immediately
+    self() ! clean_expired,
+    {ok, #state{last = undefined, clean_timer = undefined, extra = []}}.
 
 handle_call(current_rate, _From, State = #state{last = Last}) ->
     NowTime = erlang:system_time(millisecond),
@@ -189,10 +190,11 @@ handle_info({sample, Time}, State = #state{last = Last}) ->
     ?tp(dashboard_monitor_flushed, #{}),
     sample_timer(),
     {noreply, State#state{last = Now}};
-handle_info(clean_expired, State) ->
+handle_info(clean_expired, #state{clean_timer = TrefOld} = State) ->
+    ok = maybe_cancel_timer(TrefOld),
     clean(),
-    clean_timer(),
-    {noreply, State};
+    TrefNew = clean_timer(),
+    {noreply, State#state{clean_timer = TrefNew}};
 handle_info(_Info, State = #state{}) ->
     {noreply, State}.
 
@@ -205,65 +207,125 @@ code_change(_OldVsn, State = #state{}, _Extra) ->
 %% -------------------------------------------------------------------------------------------------
 %% Internal functions
 
+%% for testing
+randomize(Count, Data) when is_map(Data) ->
+    MaxAge = 7 * ?DAYS,
+    randomize(Count, Data, MaxAge).
+
+randomize(Count, Data, Age) when is_map(Data) andalso is_integer(Age) ->
+    Now = erlang:system_time(millisecond) - 1,
+    Interval = sample_interval(Age),
+    NowBase = Now - (Now rem Interval),
+    StartTs = NowBase - Age,
+    lists:foreach(
+        fun(_) ->
+            Ts = StartTs + rand:uniform(Now - StartTs),
+            Record = #emqx_monit{time = Ts, data = Data},
+            case ets:lookup(?TAB, Ts) of
+                [] ->
+                    store(Record);
+                [#emqx_monit{data = D} = R] ->
+                    store(R#emqx_monit{data = merge_sampler_maps(D, Data)})
+            end
+        end,
+        lists:seq(1, Count)
+    ).
+
+maybe_cancel_timer(Tref) when is_reference(Tref) ->
+    _ = erlang:cancel_timer(Tref),
+    ok;
+maybe_cancel_timer(_) ->
+    ok.
+
 do_call(Request) ->
     gen_server:call(?MODULE, Request, 5000).
 
-do_sample(all, Time) ->
-    do_sample(emqx:running_nodes(), Time, #{});
-do_sample(Node, Time) when Node == node() ->
-    MS = match_spec(Time),
-    internal_format(ets:select(?TAB, MS));
-do_sample(Node, Time) ->
+do_sample(Node, infinity) ->
+    %% handle RPC from old version nodes
+    do_sample(Node, 0);
+do_sample(all, Time) when is_integer(Time) ->
+    AllNodes = emqx:running_nodes(),
+    All = sample_nodes(AllNodes, Time),
+    maps:map(fun(_, S) -> adjust_synthetic_cluster_metrics(S) end, All);
+do_sample(Node, Time) when Node == node() andalso is_integer(Time) ->
+    do_sample_local(Time);
+do_sample(Node, Time) when is_integer(Time) ->
     case emqx_dashboard_proto_v1:do_sample(Node, Time) of
         {badrpc, Reason} ->
-            {badrpc, {Node, Reason}};
+            {badrpc, #{node => Node, reason => Reason}};
         Res ->
             Res
     end.
 
-do_sample([], _Time, Samples) ->
-    maps:map(
-        fun(_TS, Sample) -> adjust_synthetic_cluster_metrics(Sample) end,
-        Samples
-    );
-do_sample([Node | Nodes], Time, Res) ->
-    case do_sample(Node, Time) of
-        {badrpc, Reason} ->
-            {badrpc, Reason};
-        Samplers ->
-            do_sample(Nodes, Time, merge_cluster_samplers(Samplers, Res))
-    end.
-
-match_spec(infinity) ->
-    [{'$1', [], ['$1']}];
-match_spec(Time) ->
-    [{{'_', '$1', '_'}, [{'>=', '$1', Time}], ['$_']}].
+do_sample_local(Time) ->
+    MS = ets:fun2ms(fun(#emqx_monit{time = T} = A) when T >= Time -> A end),
+    FromDB = ets:select(?TAB, MS),
+    Map = to_ts_data_map(FromDB),
+    %% downsample before return RPC calls for less data to merge by the caller nodes
+    downsample(Time, Map).
 
-merge_cluster_samplers(NodeSamples, Cluster) ->
-    maps:fold(fun merge_cluster_samplers/3, Cluster, NodeSamples).
+sample_nodes(Nodes, Time) ->
+    ResList = concurrently_sample_nodes(Nodes, Time),
+    {Failed, Success} = lists:partition(
+        fun
+            ({badrpc, _}) -> true;
+            (_) -> false
+        end,
+        ResList
+    ),
+    Failed =/= [] andalso
+        ?SLOG(warning, #{msg => "failed_to_sample_monitor_data", errors => Failed}),
+    lists:foldl(fun(I, B) -> merge_samplers(Time, I, B) end, #{}, Success).
+
+concurrently_sample_nodes(Nodes, Time) ->
+    %% emqx_dashboard_proto_v1:do_sample has a timeout (5s),
+    Timeout = ?RPC_TIMEOUT + ?ONE_SECOND,
+    %% call emqx_utils:pmap here instead of a rpc multicall
+    %% to avoid having to introduce a new bpapi proto version
+    emqx_utils:pmap(fun(Node) -> do_sample(Node, Time) end, Nodes, Timeout).
+
+merge_samplers(SinceTime, Increment0, Base) ->
+    Increment =
+        case map_size(Increment0) > ?MAX_POSSIBLE_SAMPLES of
+            true ->
+                %% this is a response from older version node
+                downsample(SinceTime, Increment0);
+            false ->
+                Increment0
+        end,
+    maps:fold(fun merge_samplers_loop/3, Base, Increment).
 
-merge_cluster_samplers(TS, NodeSample, Cluster) ->
-    case maps:get(TS, Cluster, undefined) of
+merge_samplers_loop(TS, Increment, Base) when is_map(Increment) ->
+    case maps:get(TS, Base, undefined) of
         undefined ->
-            Cluster#{TS => NodeSample};
-        ClusterSample ->
-            Cluster#{TS => merge_cluster_sampler_map(NodeSample, ClusterSample)}
+            Base#{TS => Increment};
+        BaseSample when is_map(BaseSample) ->
+            Base#{TS => merge_sampler_maps(Increment, BaseSample)}
     end.
 
-merge_cluster_sampler_map(M1, M2) ->
-    Fun =
-        fun
-            (Key, Map) when
-                %% cluster-synced values
-                Key =:= topics;
-                Key =:= subscriptions_durable;
-                Key =:= disconnected_durable_sessions
-            ->
-                Map#{Key => maps:get(Key, M1, maps:get(Key, M2, 0))};
-            (Key, Map) ->
-                Map#{Key => maps:get(Key, M1, 0) + maps:get(Key, M2, 0)}
-        end,
-    lists:foldl(Fun, #{}, ?SAMPLER_LIST).
+merge_sampler_maps(M1, M2) when is_map(M1) andalso is_map(M2) ->
+    Fun = fun(Key, Acc) -> merge_values(Key, M1, Acc) end,
+    lists:foldl(Fun, M2, ?SAMPLER_LIST).
+
+%% topics, subscriptions_durable and disconnected_durable_sessions are cluster synced
+merge_values(topics, M1, M2) ->
+    max_values(topics, M1, M2);
+merge_values(subscriptions_durable, M1, M2) ->
+    max_values(subscriptions_durable, M1, M2);
+merge_values(disconnected_durable_sessions, M1, M2) ->
+    max_values(disconnected_durable_sessions, M1, M2);
+merge_values(Key, M1, M2) ->
+    sum_values(Key, M1, M2).
+
+max_values(Key, M1, M2) when is_map_key(Key, M1) orelse is_map_key(Key, M2) ->
+    M2#{Key => max(maps:get(Key, M1, 0), maps:get(Key, M2, 0))};
+max_values(_Key, _M1, M2) ->
+    M2.
+
+sum_values(Key, M1, M2) when is_map_key(Key, M1) orelse is_map_key(Key, M2) ->
+    M2#{Key => maps:get(Key, M1, 0) + maps:get(Key, M2, 0)};
+sum_values(_Key, _M1, M2) ->
+    M2.
 
 merge_cluster_rate(Node, Cluster) ->
     Fun =
@@ -310,13 +372,10 @@ adjust_synthetic_cluster_metrics(Metrics0) ->
 
 format({badrpc, Reason}) ->
     {badrpc, Reason};
-format(Data) ->
-    All = maps:fold(fun format/3, [], Data),
-    Compare = fun(#{time_stamp := T1}, #{time_stamp := T2}) -> T1 =< T2 end,
-    lists:sort(Compare, All).
-
-format(TimeStamp, Data, All) ->
-    [Data#{time_stamp => TimeStamp} | All].
+format(Data0) ->
+    Data1 = maps:to_list(Data0),
+    Data = lists:keysort(1, Data1),
+    lists:map(fun({TimeStamp, V}) -> V#{time_stamp => TimeStamp} end, Data).
 
 cal_rate(_Now, undefined) ->
     AllSamples = ?GAUGE_SAMPLER_LIST ++ maps:values(?DELTA_SAMPLER_RATE_MAP),
@@ -350,18 +409,83 @@ cal_rate_(Key, {Now, Last, TDelta, Res}) ->
     RateKey = maps:get(Key, ?DELTA_SAMPLER_RATE_MAP),
     {Now, Last, TDelta, Res#{RateKey => Rate}}.
 
-granularity_adapter([], Res) ->
-    lists:reverse(Res);
-granularity_adapter([Sampler], Res) ->
-    granularity_adapter([], [Sampler | Res]);
-granularity_adapter([Sampler1, Sampler2 | Rest], Res) ->
-    Fun =
-        fun(Key, M) ->
-            Value1 = maps:get(Key, Sampler1),
-            Value2 = maps:get(Key, Sampler2),
-            M#{Key => Value1 + Value2}
+%% Try to keep the total number of records around 1000.
+%% When the oldest data point is
+%% < 1h: sample every 10s: 360 data points
+%% < 1d: sample every 1m: 1440 data points
+%% < 3d: sample every 5m: 864 data points
+%% < 7d: sample every 10m: 1008 data points
+sample_interval(Age) when Age =< 60 * ?SECONDS ->
+    %% so far this can happen only during tests
+    ?ONE_SECOND;
+sample_interval(Age) when Age =< ?ONE_HOUR ->
+    10 * ?SECONDS;
+sample_interval(Age) when Age =< ?ONE_DAY ->
+    ?ONE_MINUTE;
+sample_interval(Age) when Age =< 3 * ?DAYS ->
+    5 * ?MINUTES;
+sample_interval(_Age) ->
+    10 * ?MINUTES.
+
+sample_fill_gap(Node, SinceTs) ->
+    Samples = do_sample(Node, SinceTs),
+    fill_gaps(Samples, SinceTs).
+
+fill_gaps(Samples, SinceTs) ->
+    TsList = lists:sort(maps:keys(Samples)),
+    case length(TsList) >= 2 of
+        true ->
+            do_fill_gaps(hd(TsList), tl(TsList), Samples, SinceTs);
+        false ->
+            Samples
+    end.
+
+do_fill_gaps(FirstTs, TsList, Samples, SinceTs) ->
+    Latest = lists:last(TsList),
+    Interval = sample_interval(Latest - SinceTs),
+    StartTs =
+        case round_down(SinceTs, Interval) of
+            T when T =:= 0 orelse T =:= FirstTs ->
+                FirstTs;
+            T ->
+                T
         end,
-    granularity_adapter(Rest, [lists:foldl(Fun, Sampler2, ?DELTA_SAMPLER_LIST) | Res]).
+    fill_gaps_loop(StartTs, Interval, Latest, Samples).
+
+fill_gaps_loop(T, _Interval, Latest, Samples) when T >= Latest ->
+    Samples;
+fill_gaps_loop(T, Interval, Latest, Samples) ->
+    Samples1 =
+        case is_map_key(T, Samples) of
+            true ->
+                Samples;
+            false ->
+                Samples#{T => #{}}
+        end,
+    fill_gaps_loop(T + Interval, Interval, Latest, Samples1).
+
+downsample(SinceTs, TsDataMap) when map_size(TsDataMap) >= 2 ->
+    TsList = ts_list(TsDataMap),
+    Latest = lists:max(TsList),
+    Interval = sample_interval(Latest - SinceTs),
+    downsample_loop(TsList, TsDataMap, Interval, #{});
+downsample(_Since, TsDataMap) ->
+    TsDataMap.
+
+ts_list(TsDataMap) ->
+    maps:keys(TsDataMap).
+
+round_down(Ts, Interval) ->
+    Ts - (Ts rem Interval).
+
+downsample_loop([], _TsDataMap, _Interval, Res) ->
+    Res;
+downsample_loop([Ts | Rest], TsDataMap, Interval, Res) ->
+    Bucket = round_down(Ts, Interval),
+    Agg0 = maps:get(Bucket, Res, #{}),
+    Inc = maps:get(Ts, TsDataMap),
+    Agg = merge_sampler_maps(Inc, Agg0),
+    downsample_loop(Rest, TsDataMap, Interval, Res#{Bucket => Agg}).
 
 %% -------------------------------------------------------------------------------------------------
 %% timer
@@ -381,7 +505,7 @@ clean_timer() ->
 next_interval() ->
     Interval = emqx_conf:get([dashboard, sample_interval], ?DEFAULT_SAMPLE_INTERVAL) * 1000,
     Now = erlang:system_time(millisecond),
-    NextTime = ((Now div Interval) + 1) * Interval,
+    NextTime = round_down(Now, Interval) + Interval,
     Remaining = NextTime - Now,
     {NextTime, Remaining}.
 
@@ -410,31 +534,30 @@ delta(LastData, NowData) ->
         end,
     lists:foldl(Fun, NowData, ?DELTA_SAMPLER_LIST).
 
+lookup(Ts) ->
+    ets:lookup(?TAB, Ts).
+
 store(MonitData) ->
     {atomic, ok} =
         mria:transaction(mria:local_content_shard(), fun mnesia:write/3, [?TAB, MonitData, write]).
 
 clean() ->
+    clean(?RETENTION_TIME).
+
+clean(Retention) ->
     Now = erlang:system_time(millisecond),
-    ExpiredMS = [{{'_', '$1', '_'}, [{'>', {'-', Now, '$1'}, ?RETENTION_TIME}], ['$_']}],
-    Expired = ets:select(?TAB, ExpiredMS),
-    lists:foreach(
-        fun(Data) ->
-            true = ets:delete_object(?TAB, Data)
-        end,
-        Expired
-    ),
+    MS = ets:fun2ms(fun(#emqx_monit{time = T}) -> Now - T > Retention end),
+    _ = ets:select_delete(?TAB, MS),
     ok.
 
-%% To make it easier to do data aggregation
-internal_format(List) when is_list(List) ->
+%% This data structure should not be changed because it's a RPC contract.
+%% Otherwise dashboard may not work during rolling upgrade.
+to_ts_data_map(List) when is_list(List) ->
     Fun =
-        fun(Data, All) ->
-            maps:merge(internal_format(Data), All)
+        fun(#emqx_monit{time = Time, data = Data}, All) ->
+            All#{Time => Data}
         end,
-    lists:foldl(Fun, #{}, List);
-internal_format(#emqx_monit{time = Time, data = Data}) ->
-    #{Time => Data}.
+    lists:foldl(Fun, #{}, List).
 
 getstats(Key) ->
     %% Stats ets maybe not exist when ekka join.

+ 0 - 0
apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl


Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor