Просмотр исходного кода

feat: integrate with the Datalayers

JianBo He 1 год назад
Родитель
Commit
dee353380e

+ 2 - 1
apps/emqx_bridge/src/emqx_action_info.erl

@@ -120,7 +120,8 @@ hard_coded_action_info_modules_ee() ->
         emqx_bridge_sqlserver_action_info,
         emqx_bridge_syskeeper_action_info,
         emqx_bridge_tdengine_action_info,
-        emqx_bridge_timescale_action_info
+        emqx_bridge_timescale_action_info,
+        emqx_bridge_datalayers_action_info
     ].
 -else.
 hard_coded_action_info_modules_ee() ->

+ 20 - 4
apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl

@@ -51,7 +51,8 @@ api_schemas(Method) ->
         api_ref(emqx_bridge_rabbitmq, <<"rabbitmq">>, Method),
         api_ref(emqx_bridge_kinesis, <<"kinesis_producer">>, Method ++ "_producer"),
         api_ref(emqx_bridge_greptimedb, <<"greptimedb">>, Method ++ "_grpc_v1"),
-        api_ref(emqx_bridge_azure_event_hub, <<"azure_event_hub_producer">>, Method ++ "_producer")
+        api_ref(emqx_bridge_azure_event_hub, <<"azure_event_hub_producer">>, Method ++ "_producer"),
+        api_ref(emqx_bridge_datalayers, <<"datalayers">>, Method ++ "_api_v1")
     ].
 
 schema_modules() ->
@@ -79,7 +80,8 @@ schema_modules() ->
         emqx_bridge_rabbitmq,
         emqx_bridge_kinesis,
         emqx_bridge_greptimedb,
-        emqx_bridge_azure_event_hub
+        emqx_bridge_azure_event_hub,
+        emqx_bridge_datalayers
     ].
 
 examples(Method) ->
@@ -130,7 +132,8 @@ resource_type(rabbitmq) -> emqx_bridge_rabbitmq_connector;
 resource_type(kinesis_producer) -> emqx_bridge_kinesis_impl_producer;
 resource_type(greptimedb) -> emqx_bridge_greptimedb_connector;
 %% We use AEH's Kafka interface.
-resource_type(azure_event_hub_producer) -> emqx_bridge_kafka_impl_producer.
+resource_type(azure_event_hub_producer) -> emqx_bridge_kafka_impl_producer;
+resource_type(datalayers) -> emqx_bridge_datalayers_connector.
 
 %% For bridges that need to override connector configurations.
 bridge_impl_module(BridgeType) when is_binary(BridgeType) ->
@@ -221,7 +224,8 @@ fields(bridges) ->
         influxdb_structs() ++
         redis_structs() ++
         pgsql_structs() ++ clickhouse_structs() ++ sqlserver_structs() ++ rabbitmq_structs() ++
-        kinesis_structs() ++ greptimedb_structs() ++ azure_event_hub_structs().
+        kinesis_structs() ++ greptimedb_structs() ++ azure_event_hub_structs() ++
+        datalayers_structs().
 
 mongodb_structs() ->
     [
@@ -433,6 +437,18 @@ azure_event_hub_structs() ->
             )}
     ].
 
+datalayers_structs() ->
+    [
+        {datalayers,
+            mk(
+                hoconsc:map(name, ref(emqx_bridge_datalayers, datalayers_api_v1)),
+                #{
+                    desc => <<"Datalayers Bridge Config">>,
+                    required => false
+                }
+            )}
+    ].
+
 api_ref(Module, Type, Method) ->
     {Type, ref(Module, Method)}.
 

+ 94 - 0
apps/emqx_bridge_datalayers/BSL.txt

@@ -0,0 +1,94 @@
+Business Source License 1.1
+
+Licensor:             Hangzhou EMQ Technologies Co., Ltd.
+Licensed Work:        EMQX Enterprise Edition
+                      The Licensed Work is (c) 2023
+                      Hangzhou EMQ Technologies Co., Ltd.
+Additional Use Grant: Students and educators are granted right to copy,
+                      modify, and create derivative work for research
+                      or education.
+Change Date:          2028-01-26
+Change License:       Apache License, Version 2.0
+
+For information about alternative licensing arrangements for the Software,
+please contact Licensor: https://www.emqx.com/en/contact
+
+Notice
+
+The Business Source License (this document, or the “License”) is not an Open
+Source license. However, the Licensed Work will eventually be made available
+under an Open Source License, as stated in this License.
+
+License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
+“Business Source License” is a trademark of MariaDB Corporation Ab.
+
+-----------------------------------------------------------------------------
+
+Business Source License 1.1
+
+Terms
+
+The Licensor hereby grants you the right to copy, modify, create derivative
+works, redistribute, and make non-production use of the Licensed Work. The
+Licensor may make an Additional Use Grant, above, permitting limited
+production use.
+
+Effective on the Change Date, or the fourth anniversary of the first publicly
+available distribution of a specific version of the Licensed Work under this
+License, whichever comes first, the Licensor hereby grants you rights under
+the terms of the Change License, and the rights granted in the paragraph
+above terminate.
+
+If your use of the Licensed Work does not comply with the requirements
+currently in effect as described in this License, you must purchase a
+commercial license from the Licensor, its affiliated entities, or authorized
+resellers, or you must refrain from using the Licensed Work.
+
+All copies of the original and modified Licensed Work, and derivative works
+of the Licensed Work, are subject to this License. This License applies
+separately for each version of the Licensed Work and the Change Date may vary
+for each version of the Licensed Work released by Licensor.
+
+You must conspicuously display this License on each original or modified copy
+of the Licensed Work. If you receive the Licensed Work in original or
+modified form from a third party, the terms and conditions set forth in this
+License apply to your use of that work.
+
+Any use of the Licensed Work in violation of this License will automatically
+terminate your rights under this License for the current and all other
+versions of the Licensed Work.
+
+This License does not grant you any right in any trademark or logo of
+Licensor or its affiliates (provided that you may use a trademark or logo of
+Licensor as expressly required by this License).
+
+TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
+AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
+EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
+TITLE.
+
+MariaDB hereby grants you permission to use this License’s text to license
+your works, and to refer to it using the trademark “Business Source License”,
+as long as you comply with the Covenants of Licensor below.
+
+Covenants of Licensor
+
+In consideration of the right to use this License’s text and the “Business
+Source License” name and trademark, Licensor covenants to MariaDB, and to all
+other recipients of the licensed work to be provided by Licensor:
+
+1. To specify as the Change License the GPL Version 2.0 or any later version,
+   or a license that is compatible with GPL Version 2.0 or a later version,
+   where “compatible” means that software provided under the Change License can
+   be included in a program with software provided under GPL Version 2.0 or a
+   later version. Licensor may specify additional Change Licenses without
+   limitation.
+
+2. To either: (a) specify an additional grant of rights to use that does not
+   impose any additional restriction on the right granted in this License, as
+   the Additional Use Grant; or (b) insert the text “None”.
+
+3. To specify a Change Date.
+
+4. Not to modify this License in any other way.

+ 43 - 0
apps/emqx_bridge_datalayers/README.md

@@ -0,0 +1,43 @@
+# EMQX Datalayers Bridge
+
+[Datalayers](https://docs.datalayers.cn/datalayers/latest/) is a multimodal,
+hyper-converged database for the Industrial IoT, Telematics, Energy and other industries.
+
+Datalayers is designed to be fast, efficient, and scalable, and it has a SQL-like
+query language that makes it easy to extract insights from time-series data.
+
+This application is used to connect EMQX and Datalayers. Users can create a rule and
+easily ingest IoT data into Datalayers by leveraging
+[EMQX Rules](https://docs.emqx.com/en/enterprise/v5.0/data-integration/rules.html).
+
+
+# Documentation
+
+- Refer to [Ingest Data into Datalayers](https://docs.emqx.com/en/enterprise/v5.0/data-integration/data-bridge-datalayers.html)
+  for how to use EMQX dashboard to ingest IoT data into Datalayers.
+
+- Refer to [EMQX Rules](https://docs.emqx.com/en/enterprise/v5.0/data-integration/rules.html)
+  for the EMQX rules engine introduction.
+
+
+# HTTP APIs
+
+- Several APIs are provided for bridge management, including creating, updating,
+  getting, stopping or restarting a bridge, and listing bridges.
+
+  Refer to [API Docs - Bridges](https://docs.emqx.com/en/enterprise/v5.0/admin/api-docs.html#tag/Bridges) for more detailed information.
+
+- [Create bridge API doc](https://docs.emqx.com/en/enterprise/v5.0/admin/api-docs.html#tag/Bridges/paths/~1bridges/post)
+  lists the required parameters for creating a Datalayers bridge.
+  - `server`: The IPv4 or IPv6 address or the hostname to connect to.
+  - `database`: Datalayers database name
+  - `write_syntax`: Configuration of the Datalayers line protocol used to write data points. It is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, with placeholder (template) support.
+
+# Contributing
+
+Please see our [contributing.md](../../CONTRIBUTING.md).
+
+
+# License
+
+EMQ Business Source License 1.1, refer to [LICENSE](BSL.txt).

+ 2 - 0
apps/emqx_bridge_datalayers/docker-ct

@@ -0,0 +1,2 @@
+toxiproxy
+datalayers

+ 32 - 0
apps/emqx_bridge_datalayers/mix.exs

@@ -0,0 +1,32 @@
+defmodule EMQXBridgeDatalayers.MixProject do
+  use Mix.Project
+  alias EMQXUmbrella.MixProject, as: UMP
+
+  def project do
+    [
+      app: :emqx_bridge_datalayers,
+      version: "0.1.0",
+      build_path: "../../_build",
+      erlc_options: UMP.erlc_options(),
+      erlc_paths: UMP.erlc_paths(),
+      deps_path: "../../deps",
+      lockfile: "../../mix.lock",
+      elixir: "~> 1.14",
+      start_permanent: Mix.env() == :prod,
+      deps: deps()
+    ]
+  end
+
+  def application do
+    [extra_applications: UMP.extra_applications()]
+  end
+
+  def deps() do
+    [
+      {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.13"},
+      {:emqx_connector, in_umbrella: true, runtime: false},
+      {:emqx_resource, in_umbrella: true},
+      {:emqx_bridge, in_umbrella: true, runtime: false}
+    ]
+  end
+end

+ 10 - 0
apps/emqx_bridge_datalayers/rebar.config

@@ -0,0 +1,10 @@
+%% -*- mode: erlang; -*-
+
+{erl_opts, [debug_info]}.
+
+{deps, [
+    {influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.13"}}},
+    {emqx_connector, {path, "../../apps/emqx_connector"}},
+    {emqx_resource, {path, "../../apps/emqx_resource"}},
+    {emqx_bridge, {path, "../../apps/emqx_bridge"}}
+]}.

+ 17 - 0
apps/emqx_bridge_datalayers/src/emqx_bridge_datalayers.app.src

@@ -0,0 +1,17 @@
+{application, emqx_bridge_datalayers, [
+    {description, "EMQX Enterprise Datalayers Bridge"},
+    {vsn, "0.1.0"},
+    {registered, []},
+    {applications, [
+        kernel,
+        stdlib,
+        emqx_resource,
+        influxdb
+    ]},
+    {env, [
+        {emqx_action_info_modules, [emqx_bridge_datalayers_action_info]},
+        {emqx_connector_info_modules, [emqx_bridge_datalayers_connector_info]}
+    ]},
+    {modules, []},
+    {links, []}
+]}.

+ 409 - 0
apps/emqx_bridge_datalayers/src/emqx_bridge_datalayers.erl

@@ -0,0 +1,409 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_bridge_datalayers).
+
+-behaviour(emqx_connector_examples).
+
+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx_connector/include/emqx_connector.hrl").
+-include_lib("typerefl/include/types.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+-import(hoconsc, [mk/2, enum/1, ref/2]).
+
+-export([
+    write_syntax_type/0
+]).
+
+-export([
+    namespace/0,
+    roots/0,
+    fields/1,
+    desc/1
+]).
+
+%% Examples
+-export([
+    bridge_v2_examples/1,
+    conn_bridge_examples/1,
+    connector_examples/1
+]).
+
+-type write_syntax() :: list().
+-reflect_type([write_syntax/0]).
+-typerefl_from_string({write_syntax/0, ?MODULE, to_datalayers_lines}).
+-export([to_datalayers_lines/1]).
+
+-define(CONNECTOR_TYPE, datalayers).
+-define(ACTION_TYPE, datalayers).
+
+%% -------------------------------------------------------------------------------------------------
+%% api
+
+write_syntax_type() ->
+    typerefl:alias("template", write_syntax()).
+
+%% Examples
+conn_bridge_examples(Method) ->
+    [
+        #{
+            <<"datalayers_api_v1">> => #{
+                summary => <<"Datalayers Bridge">>,
+                value => values("datalayers_api_v1", Method)
+            }
+        }
+    ].
+
+bridge_v2_examples(Method) ->
+    WriteExample =
+        <<"${topic},clientid=${clientid} ", "payload=${payload},",
+            "${clientid}_int_value=${payload.int_key}i,", "bool=${payload.bool}">>,
+    ParamsExample = #{
+        parameters => #{
+            write_syntax => WriteExample, precision => ms
+        }
+    },
+    [
+        #{
+            <<"datalayers">> => #{
+                summary => <<"Datalayers Action">>,
+                value => emqx_bridge_v2_schema:action_values(
+                    Method, datalayers, datalayers, ParamsExample
+                )
+            }
+        }
+    ].
+
+connector_examples(Method) ->
+    [
+        #{
+            <<"datalayers">> => #{
+                summary => <<"Datalayers Connector">>,
+                value => emqx_connector_schema:connector_values(
+                    Method, datalayers, connector_values(datalayers_api_v1)
+                )
+            }
+        }
+    ].
+
+connector_values(Type) ->
+    maps:merge(basic_connector_values(), #{parameters => connector_values_v(Type)}).
+
+connector_values_v(datalayers_api_v1) ->
+    #{
+        datalayers_type => datalayers_api_v1,
+        database => <<"example_database">>,
+        username => <<"example_username">>,
+        password => <<"******">>
+    }.
+
+basic_connector_values() ->
+    #{
+        enable => true,
+        server => <<"127.0.0.1:8086">>,
+        ssl => #{enable => false}
+    }.
+
+values(Protocol, get) ->
+    values(Protocol, post);
+values("datalayers_api_v1", post) ->
+    SupportUint = <<>>,
+    TypeOpts = connector_values_v(datalayers_api_v1),
+    values(common, "datalayers_api_v1", SupportUint, TypeOpts);
+values(Protocol, put) ->
+    values(Protocol, post).
+
+values(common, Protocol, SupportUint, TypeOpts) ->
+    CommonConfigs = #{
+        type => list_to_atom(Protocol),
+        name => <<"demo">>,
+        enable => true,
+        local_topic => <<"local/topic/#">>,
+        write_syntax =>
+            <<"${topic},clientid=${clientid}", " ", "payload=${payload},",
+                "${clientid}_int_value=${payload.int_key}i,", SupportUint/binary,
+                "bool=${payload.bool}">>,
+        precision => ms,
+        resource_opts => #{
+            batch_size => 100,
+            batch_time => <<"20ms">>
+        },
+        server => <<"127.0.0.1:8086">>,
+        ssl => #{enable => false}
+    },
+    maps:merge(TypeOpts, CommonConfigs).
+
+%% -------------------------------------------------------------------------------------------------
+%% Hocon Schema Definitions
+namespace() -> "bridge_datalayers".
+
+roots() -> [].
+
+fields("config_connector") ->
+    emqx_connector_schema:common_fields() ++
+        emqx_bridge_datalayers_connector:fields("connector") ++
+        emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts);
+fields("post_api_v1") ->
+    method_fields(post, datalayers_api_v1);
+fields("put_api_v1") ->
+    method_fields(put, datalayers_api_v1);
+fields("get_api_v1") ->
+    method_fields(get, datalayers_api_v1);
+fields(action) ->
+    {datalayers,
+        mk(
+            hoconsc:map(name, ref(?MODULE, datalayers_action)),
+            #{desc => <<"Datalayers Action Config">>, required => false}
+        )};
+fields(datalayers_action) ->
+    emqx_bridge_v2_schema:make_producer_action_schema(
+        mk(ref(?MODULE, action_parameters), #{
+            required => true, desc => ?DESC(action_parameters)
+        })
+    );
+fields(action_parameters) ->
+    [
+        {write_syntax, fun write_syntax/1},
+        emqx_bridge_datalayers_connector:precision_field()
+    ];
+fields(connector_resource_opts) ->
+    emqx_connector_schema:resource_opts_fields();
+fields(Field) when
+    Field == "get_connector";
+    Field == "put_connector";
+    Field == "post_connector"
+->
+    Fields =
+        emqx_bridge_datalayers_connector:fields("connector") ++
+            emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts),
+    emqx_connector_schema:api_fields(Field, ?CONNECTOR_TYPE, Fields);
+fields(Field) when
+    Field == "get_bridge_v2";
+    Field == "post_bridge_v2";
+    Field == "put_bridge_v2"
+->
+    emqx_bridge_v2_schema:api_fields(Field, ?ACTION_TYPE, fields(datalayers_action));
+fields(Type) when
+    Type == datalayers_api_v1
+->
+    datalayers_bridge_common_fields() ++
+        connector_fields(Type).
+
+method_fields(post, ConnectorType) ->
+    datalayers_bridge_common_fields() ++
+        connector_fields(ConnectorType) ++
+        type_name_fields(ConnectorType);
+method_fields(get, ConnectorType) ->
+    datalayers_bridge_common_fields() ++
+        connector_fields(ConnectorType) ++
+        type_name_fields(ConnectorType) ++
+        emqx_bridge_schema:status_fields();
+method_fields(put, ConnectorType) ->
+    datalayers_bridge_common_fields() ++
+        connector_fields(ConnectorType).
+
+datalayers_bridge_common_fields() ->
+    emqx_bridge_schema:common_bridge_fields() ++
+        [
+            {local_topic, mk(binary(), #{desc => ?DESC("local_topic")})},
+            {write_syntax, fun write_syntax/1}
+        ] ++
+        emqx_resource_schema:fields("resource_opts").
+
+connector_fields(Type) ->
+    emqx_bridge_datalayers_connector:fields(Type).
+
+type_name_fields(Type) ->
+    [
+        {type, mk(Type, #{required => true, desc => ?DESC("desc_type")})},
+        {name, mk(binary(), #{required => true, desc => ?DESC("desc_name")})}
+    ].
+
+desc("config") ->
+    ?DESC("desc_config");
+desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
+    ["Configuration for Datalayers using `", string:to_upper(Method), "` method."];
+desc(datalayers_api_v1) ->
+    ?DESC(emqx_bridge_datalayers_connector, "datalayers_api_v1");
+desc(datalayers_action) ->
+    ?DESC(datalayers_action);
+desc(action_parameters) ->
+    ?DESC(action_parameters);
+desc("config_connector") ->
+    ?DESC("desc_config");
+desc(connector_resource_opts) ->
+    ?DESC(emqx_resource_schema, "resource_opts");
+desc(_) ->
+    undefined.
+
+write_syntax(type) ->
+    write_syntax_type();
+write_syntax(required) ->
+    true;
+write_syntax(validator) ->
+    [?NOT_EMPTY("the value of the field 'write_syntax' cannot be empty")];
+write_syntax(converter) ->
+    fun to_datalayers_lines/1;
+write_syntax(desc) ->
+    ?DESC("write_syntax");
+write_syntax(format) ->
+    <<"sql">>;
+write_syntax(_) ->
+    undefined.
+
+to_datalayers_lines(Lines = [#{} | _]) ->
+    %% already parsed/converted (e.g.: bridge_probe, after hocon_tconf:check_plain)
+    Lines;
+to_datalayers_lines(RawLines) ->
+    try
+        datalayers_lines(str(RawLines), [])
+    catch
+        _:Reason:Stacktrace ->
+            Msg = lists:flatten(
+                io_lib:format("Unable to parse Datalayers line protocol: ~p", [RawLines])
+            ),
+            ?SLOG(error, #{msg => Msg, error_reason => Reason, stacktrace => Stacktrace}),
+            throw(Msg)
+    end.
+
+-define(MEASUREMENT_ESC_CHARS, [$,, $\s]).
+-define(TAG_FIELD_KEY_ESC_CHARS, [$,, $=, $\s]).
+-define(FIELD_VAL_ESC_CHARS, [$", $\\]).
+% Common separator for both tags and fields
+-define(SEP, $\s).
+-define(MEASUREMENT_TAG_SEP, $,).
+-define(KEY_SEP, $=).
+-define(VAL_SEP, $,).
+-define(NON_EMPTY, [_ | _]).
+
+datalayers_lines([] = _RawLines, Acc) ->
+    ?NON_EMPTY = lists:reverse(Acc);
+datalayers_lines(RawLines, Acc) ->
+    {Acc1, RawLines1} = datalayers_line(string:trim(RawLines, leading, "\s\n"), Acc),
+    datalayers_lines(RawLines1, Acc1).
+
+datalayers_line([], Acc) ->
+    {Acc, []};
+datalayers_line(Line, Acc) ->
+    {?NON_EMPTY = Measurement, Line1} = measurement(Line),
+    {Tags, Line2} = tags(Line1),
+    {?NON_EMPTY = Fields, Line3} = datalayers_fields(Line2),
+    {Timestamp, Line4} = timestamp(Line3),
+    {
+        [
+            #{
+                measurement => Measurement,
+                tags => Tags,
+                fields => Fields,
+                timestamp => Timestamp
+            }
+            | Acc
+        ],
+        Line4
+    }.
+
+measurement(Line) ->
+    unescape(?MEASUREMENT_ESC_CHARS, [?MEASUREMENT_TAG_SEP, ?SEP], Line, []).
+
+tags([?MEASUREMENT_TAG_SEP | Line]) ->
+    tags1(Line, []);
+tags(Line) ->
+    {[], Line}.
+
+%% Empty line is invalid as fields are required after tags,
+%% need to break recursion here and fail later on parsing fields
+tags1([] = Line, Acc) ->
+    {lists:reverse(Acc), Line};
+%% Matching non empty Acc treats lines like "m, field=field_val" invalid
+tags1([?SEP | _] = Line, ?NON_EMPTY = Acc) ->
+    {lists:reverse(Acc), Line};
+tags1(Line, Acc) ->
+    {Tag, Line1} = tag(Line),
+    tags1(Line1, [Tag | Acc]).
+
+tag(Line) ->
+    {?NON_EMPTY = Key, Line1} = key(Line),
+    {?NON_EMPTY = Val, Line2} = tag_val(Line1),
+    {{Key, Val}, Line2}.
+
+tag_val(Line) ->
+    {Val, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?VAL_SEP, ?SEP], Line, []),
+    {Val, strip_l(Line1, ?VAL_SEP)}.
+
+datalayers_fields([?SEP | Line]) ->
+    fields1(string:trim(Line, leading, "\s"), []).
+
+%% Timestamp is optional, so fields may be at the very end of the line
+fields1([Ch | _] = Line, Acc) when Ch =:= ?SEP; Ch =:= $\n ->
+    {lists:reverse(Acc), Line};
+fields1([] = Line, Acc) ->
+    {lists:reverse(Acc), Line};
+fields1(Line, Acc) ->
+    {Field, Line1} = field(Line),
+    fields1(Line1, [Field | Acc]).
+
+field(Line) ->
+    {?NON_EMPTY = Key, Line1} = key(Line),
+    {Val, Line2} = field_val(Line1),
+    {{Key, Val}, Line2}.
+
+field_val([$" | Line]) ->
+    {Val, [$" | Line1]} = unescape(?FIELD_VAL_ESC_CHARS, [$"], Line, []),
+    %% Quoted val can be empty
+    {{quoted, Val}, strip_l(Line1, ?VAL_SEP)};
+field_val(Line) ->
+    %% Unquoted value should not be un-escaped according to InfluxDB protocol,
+    %% as it can only hold float, integer, uinteger or boolean value.
+    %% However, as templates are possible, un-escaping is applied here,
+    %% which also helps to detect some invalid lines, e.g.: "m,tag=1 field= ${timestamp}"
+    {Val, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?VAL_SEP, ?SEP, $\n], Line, []),
+    {?NON_EMPTY = Val, strip_l(Line1, ?VAL_SEP)}.
+
+timestamp([?SEP | Line]) ->
+    Line1 = string:trim(Line, leading, "\s"),
+    %% Similarly to unquoted field value, un-escape a timestamp to validate and handle
+    %% potentially escaped characters in a template
+    {T, Line2} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?SEP, $\n], Line1, []),
+    {timestamp1(T), Line2};
+timestamp(Line) ->
+    {undefined, Line}.
+
+timestamp1(?NON_EMPTY = Ts) -> Ts;
+timestamp1(_Ts) -> undefined.
+
+%% Common for both tag and field keys
+key(Line) ->
+    {Key, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?KEY_SEP], Line, []),
+    {Key, strip_l(Line1, ?KEY_SEP)}.
+
+%% Only strip a character between pairs, don't strip it(and let it fail)
+%% if the char to be stripped is at the end, e.g.: m,tag=val, field=val
+strip_l([Ch, Ch1 | Str], Ch) when Ch1 =/= ?SEP ->
+    [Ch1 | Str];
+strip_l(Str, _Ch) ->
+    Str.
+
+unescape(EscapeChars, SepChars, [$\\, Char | T], Acc) ->
+    ShouldEscapeBackslash = lists:member($\\, EscapeChars),
+    Acc1 =
+        case lists:member(Char, EscapeChars) of
+            true -> [Char | Acc];
+            false when not ShouldEscapeBackslash -> [Char, $\\ | Acc]
+        end,
+    unescape(EscapeChars, SepChars, T, Acc1);
+unescape(EscapeChars, SepChars, [Char | T] = L, Acc) ->
+    IsEscapeChar = lists:member(Char, EscapeChars),
+    case lists:member(Char, SepChars) of
+        true -> {lists:reverse(Acc), L};
+        false when not IsEscapeChar -> unescape(EscapeChars, SepChars, T, [Char | Acc])
+    end;
+unescape(_EscapeChars, _SepChars, [] = L, Acc) ->
+    {lists:reverse(Acc), L}.
+
+str(A) when is_atom(A) ->
+    atom_to_list(A);
+str(B) when is_binary(B) ->
+    binary_to_list(B);
+str(S) when is_list(S) ->
+    S.

+ 77 - 0
apps/emqx_bridge_datalayers/src/emqx_bridge_datalayers_action_info.erl

@@ -0,0 +1,77 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_bridge_datalayers_action_info).
+
+-behaviour(emqx_action_info).
+
+-export([
+    bridge_v1_config_to_action_config/2,
+    bridge_v1_config_to_connector_config/1,
+    connector_action_config_to_bridge_v1_config/2,
+    bridge_v1_type_name/0,
+    action_type_name/0,
+    connector_type_name/0,
+    schema_module/0
+]).
+
+%% dynamic callback
+-export([
+    bridge_v1_type_name_fun/1
+]).
+
+-import(emqx_utils_conv, [bin/1]).
+
+-define(SCHEMA_MODULE, emqx_bridge_datalayers).
+
+bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
+    ActionTopLevelKeys = schema_keys(datalayers_action),
+    ActionParametersKeys = schema_keys(action_parameters),
+    ActionKeys = ActionTopLevelKeys ++ ActionParametersKeys,
+    ActionConfig = make_config_map(ActionKeys, ActionParametersKeys, BridgeV1Config),
+    emqx_utils_maps:update_if_present(
+        <<"resource_opts">>,
+        fun emqx_bridge_v2_schema:project_to_actions_resource_opts/1,
+        ActionConfig#{<<"connector">> => ConnectorName}
+    ).
+
+bridge_v1_config_to_connector_config(BridgeV1Config) ->
+    ActionTopLevelKeys = schema_keys(datalayers_action),
+    ActionParametersKeys = schema_keys(action_parameters),
+    ActionKeys = ActionTopLevelKeys ++ ActionParametersKeys,
+    ConnectorTopLevelKeys = schema_keys("config_connector"),
+    ConnectorKeys = maps:keys(BridgeV1Config) -- (ActionKeys -- ConnectorTopLevelKeys),
+    ConnectorParametersKeys = ConnectorKeys -- ConnectorTopLevelKeys,
+    ConnConfig0 = make_config_map(ConnectorKeys, ConnectorParametersKeys, BridgeV1Config),
+    emqx_utils_maps:update_if_present(
+        <<"resource_opts">>,
+        fun emqx_connector_schema:project_to_connector_resource_opts/1,
+        ConnConfig0
+    ).
+
+connector_action_config_to_bridge_v1_config(ConnectorRawConf, ActionRawConf) ->
+    RawConf = emqx_action_info:connector_action_config_to_bridge_v1_config(
+        ConnectorRawConf, ActionRawConf
+    ),
+    maps:without([<<"datalayers_type">>], RawConf).
+
+bridge_v1_type_name() ->
+    {fun ?MODULE:bridge_v1_type_name_fun/1, bridge_v1_type_names()}.
+
+action_type_name() -> datalayers.
+
+connector_type_name() -> datalayers.
+
+schema_module() -> ?SCHEMA_MODULE.
+
+bridge_v1_type_name_fun(_) ->
+    datalayers.
+
+make_config_map(PickKeys, IndentKeys, Config) ->
+    Conf0 = maps:with(PickKeys, Config),
+    emqx_utils_maps:indent(<<"parameters">>, IndentKeys, Conf0).
+
+schema_keys(Name) ->
+    [bin(Key) || Key <- proplists:get_keys(?SCHEMA_MODULE:fields(Name))].
+
+bridge_v1_type_names() -> [datalayers].

+ 916 - 0
apps/emqx_bridge_datalayers/src/emqx_bridge_datalayers_connector.erl

@@ -0,0 +1,916 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_bridge_datalayers_connector).
+
+-include_lib("emqx_connector/include/emqx_connector.hrl").
+
+-include_lib("hocon/include/hoconsc.hrl").
+-include_lib("typerefl/include/types.hrl").
+-include_lib("emqx/include/logger.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
+
+-import(hoconsc, [mk/2, enum/1, ref/2]).
+
+-behaviour(emqx_resource).
+
+%% callbacks of behaviour emqx_resource
+-export([
+    resource_type/0,
+    callback_mode/0,
+    on_start/2,
+    on_stop/2,
+    on_add_channel/4,
+    on_remove_channel/3,
+    on_get_channel_status/3,
+    on_get_channels/1,
+    on_query/3,
+    on_batch_query/3,
+    on_query_async/4,
+    on_batch_query_async/4,
+    on_get_status/2,
+    on_format_query_result/1
+]).
+-export([reply_callback/2]).
+
+-export([
+    roots/0,
+    namespace/0,
+    fields/1,
+    desc/1
+]).
+
+-export([transform_bridge_v1_config_to_connector_config/1]).
+
+-export([precision_field/0]).
+
+%% only for test
+-export([is_unrecoverable_error/1]).
+
+-type ts_precision() :: ns | us | ms | s.
+
+%% Allocatable resources
+-define(datalayers_client, datalayers_client).
+
+-define(DATALAYERS_DEFAULT_PORT, 8361).
+
+%% datalayers servers don't need parse
+-define(DATALAYERS_HOST_OPTIONS, #{
+    default_port => ?DATALAYERS_DEFAULT_PORT
+}).
+
+-define(DEFAULT_TIMESTAMP_TMPL, "${timestamp}").
+
+-define(set_tag, set_tag).
+-define(set_field, set_field).
+
+-define(IS_HTTP_ERROR(STATUS_CODE),
+    (is_integer(STATUS_CODE) andalso
+        (STATUS_CODE < 200 orelse STATUS_CODE >= 300))
+).
+
+%% -------------------------------------------------------------------------------------------------
+%% resource callback
+
+resource_type() -> datalayers.
+
+callback_mode() -> async_if_possible.
+
+%% Register an action channel on this connector: derive a per-channel
+%% client carrying the action's timestamp precision and pre-process the
+%% write-syntax template once, at channel-install time.
+%% Fixed misspelled local variable (was `WriteSytaxTmpl`).
+on_add_channel(
+    _InstanceId,
+    #{channels := Channels, client := Client} = OldState,
+    ChannelId,
+    #{parameters := Parameters} = ChannelConfig0
+) ->
+    #{write_syntax := WriteSyntaxTmpl} = Parameters,
+    Precision = maps:get(precision, Parameters, ms),
+    ChannelConfig = maps:merge(
+        Parameters,
+        ChannelConfig0#{
+            %% NOTE(review): the `influxdb` driver is reused here —
+            %% Datalayers speaks a compatible write protocol.
+            channel_client => influxdb:update_precision(Client, Precision),
+            write_syntax => to_config(WriteSyntaxTmpl, Precision)
+        }
+    ),
+    {ok, OldState#{
+        channels => maps:put(ChannelId, ChannelConfig, Channels)
+    }}.
+
+%% Deregister a channel; the shared client stays up for other channels.
+on_remove_channel(_InstanceId, #{channels := Channels} = State, ChannelId) ->
+    NewState = State#{channels => maps:remove(ChannelId, Channels)},
+    {ok, NewState}.
+
+%% A channel is only `connected` when the connector itself is; any
+%% other connector status is reported as `connecting`.
+on_get_channel_status(InstanceId, _ChannelId, State) ->
+    case on_get_status(InstanceId, State) of
+        connected -> connected;
+        _ -> connecting
+    end.
+
+on_get_channels(InstanceId) ->
+    emqx_bridge_v2:get_channels_for_connector(InstanceId).
+
+on_start(InstId, Config) ->
+    %% InstID as pool would be handled by influxdb client
+    %% so there is no need to allocate pool_name here
+    start_client(InstId, Config).
+
+%% Stop the allocated client, if any; a no-op when none was allocated.
+on_stop(InstId, _State) ->
+    case emqx_resource:get_allocated_resources(InstId) of
+        #{?datalayers_client := Client} ->
+            Res = influxdb:stop_client(Client),
+            ?tp(datalayers_client_stopped, #{instance_id => InstId}),
+            Res;
+        _ ->
+            ok
+    end.
+
+%% Render one message into data points and write them synchronously.
+%% Template-rendering failures are unrecoverable: retrying cannot fix
+%% a bad payload, so the message is dropped with a logged error.
+on_query(InstId, {Channel, Message}, #{channels := ChannelConf}) ->
+    %% Fetch both channel fields with a single map lookup.
+    #{write_syntax := SyntaxLines, channel_client := Client} =
+        maps:get(Channel, ChannelConf),
+    case data_to_points(Message, SyntaxLines) of
+        {ok, Points} ->
+            ?tp(
+                datalayers_connector_send_query,
+                #{points => Points, batch => false, mode => sync}
+            ),
+            do_query(InstId, Channel, Client, Points);
+        {error, ErrorPoints} ->
+            ?tp(
+                datalayers_connector_send_query_error,
+                #{batch => false, mode => sync, error => ErrorPoints}
+            ),
+            log_error_points(InstId, ErrorPoints),
+            {error, {unrecoverable_error, ErrorPoints}}
+    end.
+
+%% Once a Batched Data trans to points failed.
+%% This batch query failed
+on_batch_query(InstId, BatchData, #{channels := ChannelConf}) ->
+    %% All requests in one batch belong to the same channel.
+    [{Channel, _} | _] = BatchData,
+    %% Fetch both channel fields with a single map lookup.
+    #{write_syntax := SyntaxLines, channel_client := Client} =
+        maps:get(Channel, ChannelConf),
+    case parse_batch_data(InstId, BatchData, SyntaxLines) of
+        {ok, Points} ->
+            ?tp(
+                datalayers_connector_send_query,
+                #{points => Points, batch => true, mode => sync}
+            ),
+            do_query(InstId, Channel, Client, Points);
+        {error, Reason} ->
+            %% Fixed tracepoint typo: was `dayalayers_connector_send_query_error`,
+            %% inconsistent with the other send_query_error tracepoints.
+            ?tp(
+                datalayers_connector_send_query_error,
+                #{batch => true, mode => sync, error => Reason}
+            ),
+            {error, {unrecoverable_error, Reason}}
+    end.
+
+%% Async variant of on_query/3: points are rendered synchronously, the
+%% write is handed to the driver together with the reply callback.
+on_query_async(
+    InstId,
+    {Channel, Message},
+    {ReplyFun, Args},
+    #{channels := ChannelConf}
+) ->
+    %% Fetch both channel fields with a single map lookup.
+    #{write_syntax := SyntaxLines, channel_client := Client} =
+        maps:get(Channel, ChannelConf),
+    case data_to_points(Message, SyntaxLines) of
+        {ok, Points} ->
+            ?tp(
+                datalayers_connector_send_query,
+                #{points => Points, batch => false, mode => async}
+            ),
+            do_async_query(InstId, Channel, Client, Points, {ReplyFun, Args});
+        {error, ErrorPoints} = Err ->
+            ?tp(
+                datalayers_connector_send_query_error,
+                #{batch => false, mode => async, error => ErrorPoints}
+            ),
+            log_error_points(InstId, ErrorPoints),
+            Err
+    end.
+
+%% Async variant of on_batch_query/3; a single render failure fails the
+%% whole batch (see parse_batch_data/3).
+on_batch_query_async(
+    InstId,
+    BatchData,
+    {ReplyFun, Args},
+    #{channels := ChannelConf}
+) ->
+    %% All requests in one batch belong to the same channel.
+    [{Channel, _} | _] = BatchData,
+    %% Fetch both channel fields with a single map lookup.
+    #{write_syntax := SyntaxLines, channel_client := Client} =
+        maps:get(Channel, ChannelConf),
+    case parse_batch_data(InstId, BatchData, SyntaxLines) of
+        {ok, Points} ->
+            ?tp(
+                datalayers_connector_send_query,
+                #{points => Points, batch => true, mode => async}
+            ),
+            do_async_query(InstId, Channel, Client, Points, {ReplyFun, Args});
+        {error, Reason} ->
+            ?tp(
+                datalayers_connector_send_query_error,
+                #{batch => true, mode => async, error => Reason}
+            ),
+            {error, {unrecoverable_error, Reason}}
+    end.
+
+%% Reuse the HTTP connector's formatter for query-result traces.
+on_format_query_result(Result) ->
+    emqx_bridge_http_connector:on_format_query_result(Result).
+
+%% Health check: ping the driver client.
+on_get_status(_InstId, #{client := Client}) ->
+    %case influxdb:is_alive(Client) andalso ok =:= influxdb:check_auth(Client) of
+    case influxdb:is_alive(Client) of
+        true ->
+            connected;
+        false ->
+            disconnected
+    end.
+
+%% Convert a legacy bridge v1 config to the v2 connector layout:
+%% auth/database fields move under `parameters`, tagged with the API
+%% flavor discriminator.
+transform_bridge_v1_config_to_connector_config(BridgeV1Config) ->
+    IndentKeys = [username, password, database, token, bucket, org],
+    ConnConfig0 = maps:without([write_syntax, precision], BridgeV1Config),
+    ConnConfig1 =
+        case emqx_utils_maps:indent(parameters, IndentKeys, ConnConfig0) of
+            %% deliberately no fallback clause: a v1 config without
+            %% `database` is unexpected and should crash loudly
+            #{parameters := #{database := _} = Params} = Conf ->
+                Conf#{parameters => Params#{datalayers_type => datalayers_api_v1}}
+        end,
+    emqx_utils_maps:update_if_present(
+        resource_opts,
+        fun emqx_connector_schema:project_to_connector_resource_opts/1,
+        ConnConfig1
+    ).
+
+%% -------------------------------------------------------------------------------------------------
+%% schema
+
+namespace() -> connector_datalayers.
+
+%% Hocon schema root: a single `config` struct.
+roots() ->
+    [
+        {config, #{
+            type => hoconsc:ref(?MODULE, "connector")
+        }}
+    ].
+
+%% Connector schema: server + parameters union + SSL options.
+fields("connector") ->
+    [
+        server_field(),
+        parameter_field()
+    ] ++ emqx_connector_schema_lib:ssl_fields();
+fields("connector_datalayers_api_v1") ->
+    [datalayers_type_field(datalayers_api_v1) | datalayers_api_v1_fields()];
+%% ============ begin: schema for old bridge configs ============
+
+fields(datalayers_api_v1) ->
+    fields(common) ++ datalayers_api_v1_fields();
+fields(common) ->
+    [
+        server_field(),
+        precision_field()
+    ] ++ emqx_connector_schema_lib:ssl_fields().
+%% ============ end: schema for old bridge configs ============
+
+%% Discriminator field selecting the API flavor; defaults to `Type`.
+datalayers_type_field(Type) ->
+    {datalayers_type, #{
+        required => true,
+        type => Type,
+        default => Type,
+        desc => ?DESC(atom_to_list(Type))
+    }}.
+
+server_field() ->
+    {server, server()}.
+
+precision_field() ->
+    {precision,
+        %% The influxdb only supports these 4 precision:
+        %% See "https://github.com/influxdata/influxdb/blob/
+        %% 6b607288439a991261307518913eb6d4e280e0a7/models/points.go#L487" for
+        %% more information.
+        mk(enum([ns, us, ms, s]), #{
+            required => false, default => ms, desc => ?DESC("precision")
+        })}.
+
+%% `parameters` is a union over the supported API flavors (currently
+%% only api_v1).
+parameter_field() ->
+    {parameters,
+        mk(
+            hoconsc:union([
+                ref(?MODULE, "connector_" ++ T)
+             || T <- ["datalayers_api_v1"]
+            ]),
+            #{required => true, desc => ?DESC("datalayers_parameters")}
+        )}.
+
+datalayers_api_v1_fields() ->
+    [
+        {database, mk(binary(), #{required => true, desc => ?DESC("database")})},
+        {username, mk(binary(), #{desc => ?DESC("username")})},
+        {password, emqx_schema_secret:mk(#{desc => ?DESC("password")})}
+    ].
+
+%% Server address; scheme prefixes and trailing slashes are stripped by
+%% the converter (see convert_server/2).
+server() ->
+    Meta = #{
+        required => false,
+        default => <<"127.0.0.1:8086">>,
+        desc => ?DESC("server"),
+        converter => fun convert_server/2
+    },
+    emqx_schema:servers_sc(Meta, ?DATALAYERS_HOST_OPTIONS).
+
+%% Human-readable descriptions for schema structs.
+desc(common) ->
+    ?DESC("common");
+desc(parameters) ->
+    %% Fixed desc-key typo: was "dayalayers_parameters", inconsistent
+    %% with the "datalayers_parameters" key used by the string clause.
+    ?DESC("datalayers_parameters");
+desc("datalayers_parameters") ->
+    ?DESC("datalayers_parameters");
+desc(datalayers_api_v1) ->
+    ?DESC("datalayers_api_v1");
+desc("connector") ->
+    ?DESC("connector");
+desc("connector_datalayers_api_v1") ->
+    ?DESC("datalayers_api_v1").
+
+%% -------------------------------------------------------------------------------------------------
+%% internal functions
+
+%% Start a driver client, logging a redacted copy of the configuration.
+%% Any exception from the driver is caught and returned as {error, R}.
+start_client(InstId, Config) ->
+    ClientConfig = client_config(InstId, Config),
+    ?SLOG(info, #{
+        msg => "starting_datalayers_connector",
+        connector => InstId,
+        config => emqx_utils:redact(Config),
+        client_config => emqx_utils:redact(ClientConfig)
+    }),
+    try do_start_client(InstId, ClientConfig, Config) of
+        Res = {ok, #{client := Client}} ->
+            %% Track the client so on_stop/2 can find it even if the
+            %% resource state is lost.
+            ok = emqx_resource:allocate_resource(InstId, ?datalayers_client, Client),
+            Res;
+        {error, Reason} ->
+            {error, Reason}
+    catch
+        E:R:S ->
+            ?tp(datalayers_connector_start_exception, #{error => {E, R}}),
+            ?SLOG(warning, #{
+                msg => "start_datalayers_connector_error",
+                connector => InstId,
+                error => E,
+                reason => R,
+                stack => S
+            }),
+            {error, R}
+    end.
+
+%% Start the client and probe liveness. On any failure the client is
+%% stopped again so no process leaks; a client left over from a previous
+%% incarnation is stopped and the start is retried once.
+do_start_client(InstId, ClientConfig, Config) ->
+    case influxdb:start_client(ClientConfig) of
+        {ok, Client} ->
+            case influxdb:is_alive(Client, true) of
+                true ->
+                    State = #{client => Client, channels => #{}},
+                    {ok, State};
+                %case influxdb:check_auth(Client) of
+                %    ok ->
+                %        State = #{client => Client, channels => #{}},
+                %        ?SLOG(info, #{
+                %            msg => "starting_datalayers_connector_success",
+                %            connector => InstId,
+                %            client => redact_auth(Client),
+                %            state => redact_auth(State)
+                %        }),
+                %        {ok, State};
+                %    Error ->
+                %        ?tp(datalayers_connector_start_failed, #{error => auth_error}),
+                %        ?SLOG(warning, #{
+                %            msg => "failed_to_start_datalayers_connector",
+                %            error => Error,
+                %            connector => InstId,
+                %            client => redact_auth(Client),
+                %            reason => auth_error
+                %        }),
+                %        %% no leak
+                %        _ = influxdb:stop_client(Client),
+                %        {error, connect_ok_but_auth_failed}
+                %end;
+                {false, Reason} ->
+                    ?tp(datalayers_connector_start_failed, #{
+                        error => datalayers_client_not_alive, reason => Reason
+                    }),
+                    ?SLOG(warning, #{
+                        msg => "failed_to_start_datalayers_connector",
+                        connector => InstId,
+                        client => redact_auth(Client),
+                        reason => Reason
+                    }),
+                    %% no leak
+                    _ = influxdb:stop_client(Client),
+                    {error, {connect_failed, Reason}}
+            end;
+        {error, {already_started, Client0}} ->
+            ?tp(datalayers_connector_start_already_started, #{}),
+            ?SLOG(info, #{
+                msg => "restarting_datalayers_connector_found_already_started_client",
+                connector => InstId,
+                old_client => redact_auth(Client0)
+            }),
+            _ = influxdb:stop_client(Client0),
+            do_start_client(InstId, ClientConfig, Config);
+        {error, Reason} ->
+            ?tp(datalayers_connector_start_failed, #{error => Reason}),
+            ?SLOG(warning, #{
+                msg => "failed_to_start_datalayers_connector",
+                connector => InstId,
+                reason => Reason
+            }),
+            {error, Reason}
+    end.
+
+%% Build the option proplist handed to influxdb:start_client/1.
+%% The connector instance id doubles as the worker pool name.
+client_config(
+    InstId,
+    Config = #{
+        server := Server
+    }
+) ->
+    #{hostname := Host, port := Port} = emqx_schema:parse_server(Server, ?DATALAYERS_HOST_OPTIONS),
+    [
+        {host, str(Host)},
+        {port, Port},
+        {pool_size, erlang:system_info(schedulers)},
+        {pool, InstId}
+    ] ++ protocol_config(Config).
+
+%% api v1 config
+protocol_config(#{
+    parameters := #{datalayers_type := datalayers_api_v1, database := DB} = Params, ssl := SSL
+}) ->
+    [
+        {protocol, http},
+        {version, v1},
+        {database, str(DB)}
+    ] ++ username(Params) ++ password(Params) ++ ssl_config(SSL).
+
+%% TLS options for the driver; empty transport opts when disabled.
+ssl_config(#{enable := false}) ->
+    [
+        {https_enabled, false}
+    ];
+ssl_config(SSL = #{enable := true}) ->
+    [
+        {https_enabled, true},
+        {transport, ssl},
+        {transport_opts, emqx_tls_lib:to_client_opts(SSL)}
+    ].
+
+%% Optional credentials: omitted entirely when not configured.
+username(#{username := Username}) ->
+    [{username, str(Username)}];
+username(_) ->
+    [].
+
+password(#{password := Password}) ->
+    %% TODO: teach `influxdb` to accept 0-arity closures as passwords.
+    [{password, str(emqx_secret:unwrap(Password))}];
+password(_) ->
+    [].
+
+%% Redact "authorization" entries (case-insensitive) before logging.
+redact_auth(Term) ->
+    emqx_utils:redact(Term, fun is_auth_key/1).
+
+is_auth_key(Key) when is_binary(Key) ->
+    string:equal("authorization", Key, true);
+is_auth_key(_) ->
+    false.
+
+%% -------------------------------------------------------------------------------------------------
+%% Query
+%% Synchronous write. HTTP 401 and unrecoverable driver errors are
+%% reported as unrecoverable so the buffer layer does not retry them.
+do_query(InstId, Channel, Client, Points) ->
+    emqx_trace:rendered_action_template(Channel, #{points => Points, is_async => false}),
+    case influxdb:write(Client, Points) of
+        ok ->
+            ?SLOG(debug, #{
+                msg => "datalayers_write_point_success",
+                connector => InstId,
+                points => Points
+            });
+        {error, {401, _, _}} ->
+            ?tp(datalayers_connector_do_query_failure, #{error => <<"authorization failure">>}),
+            ?SLOG(error, #{
+                msg => "datalayers_authorization_failed",
+                client => redact_auth(Client),
+                connector => InstId
+            }),
+            {error, {unrecoverable_error, <<"authorization failure">>}};
+        {error, Reason} = Err ->
+            ?tp(datalayers_connector_do_query_failure, #{error => Reason}),
+            ?SLOG(error, #{
+                msg => "datalayers_write_point_failed",
+                connector => InstId,
+                reason => Reason
+            }),
+            case is_unrecoverable_error(Err) of
+                true ->
+                    {error, {unrecoverable_error, Reason}};
+                false ->
+                    {error, {recoverable_error, Reason}}
+            end
+    end.
+
+%% Asynchronous write: the driver's reply is post-processed by
+%% reply_callback/2 before reaching the caller's reply fun.
+do_async_query(InstId, Channel, Client, Points, ReplyFunAndArgs) ->
+    ?SLOG(info, #{
+        msg => "datalayers_write_point_async",
+        connector => InstId,
+        points => Points
+    }),
+    emqx_trace:rendered_action_template(Channel, #{points => Points, is_async => true}),
+    WrappedReplyFunAndArgs = {fun ?MODULE:reply_callback/2, [ReplyFunAndArgs]},
+    {ok, _WorkerPid} = influxdb:write_async(Client, Points, WrappedReplyFunAndArgs).
+
+%% Classify the async driver reply into recoverable/unrecoverable
+%% errors before forwarding it to the original reply fun.
+reply_callback(ReplyFunAndArgs, {error, Reason} = Error) ->
+    case is_unrecoverable_error(Error) of
+        true ->
+            Result = {error, {unrecoverable_error, Reason}},
+            emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result);
+        false ->
+            Result = {error, {recoverable_error, Reason}},
+            emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result)
+    end;
+reply_callback(ReplyFunAndArgs, {ok, 401, _, _}) ->
+    ?tp(datalayers_connector_do_query_failure, #{error => <<"authorization failure">>}),
+    Result = {error, {unrecoverable_error, <<"authorization failure">>}},
+    emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result);
+reply_callback(ReplyFunAndArgs, {ok, Code, _, Body}) when ?IS_HTTP_ERROR(Code) ->
+    ?tp(datalayers_connector_do_query_failure, #{error => Body}),
+    Result = {error, {unrecoverable_error, Body}},
+    emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result);
+reply_callback(ReplyFunAndArgs, Result) ->
+    ?tp(datalayers_connector_do_query_ok, #{result => Result}),
+    emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result).
+
+%% -------------------------------------------------------------------------------------------------
+%% Tags & Fields Config Trans
+
+%% Pre-compile the configured write-syntax lines into template items,
+%% resolving the timestamp precision pair per line.
+to_config(Lines, Precision) ->
+    to_config(Lines, [], Precision).
+
+to_config([], Acc, _Precision) ->
+    lists:reverse(Acc);
+to_config([Item0 | Rest], Acc, Precision) ->
+    Ts0 = maps:get(timestamp, Item0, undefined),
+    {Ts, FromPrecision, ToPrecision} = preproc_tmpl_timestamp(Ts0, Precision),
+    Item = #{
+        measurement => emqx_placeholder:preproc_tmpl(maps:get(measurement, Item0)),
+        timestamp => Ts,
+        precision => {FromPrecision, ToPrecision},
+        tags => to_kv_config(maps:get(tags, Item0)),
+        fields => to_kv_config(maps:get(fields, Item0))
+    },
+    to_config(Rest, [Item | Acc], Precision).
+
+%% pre-process the timestamp template
+%% returns a tuple of three elements:
+%% 1. The timestamp template itself.
+%% 2. The source timestamp precision (ms if the template ${timestamp} is used).
+%% 3. The target timestamp precision (configured for the client).
+preproc_tmpl_timestamp(undefined, Precision) ->
+    %% not configured, we default it to the message timestamp
+    preproc_tmpl_timestamp(?DEFAULT_TIMESTAMP_TMPL, Precision);
+preproc_tmpl_timestamp(Ts, Precision) when is_integer(Ts) ->
+    %% a const value is used which is very much unusual, but we have to add a special handling
+    {Ts, Precision, Precision};
+preproc_tmpl_timestamp(Ts, Precision) when is_list(Ts) ->
+    preproc_tmpl_timestamp(iolist_to_binary(Ts), Precision);
+preproc_tmpl_timestamp(<<?DEFAULT_TIMESTAMP_TMPL>> = Ts, Precision) ->
+    {emqx_placeholder:preproc_tmpl(Ts), ms, Precision};
+preproc_tmpl_timestamp(Ts, Precision) when is_binary(Ts) ->
+    %% a placehold is in use. e.g. ${payload.my_timestamp}
+    %% we can only hope it the value will be of the same precision in the configs
+    {emqx_placeholder:preproc_tmpl(Ts), Precision, Precision}.
+
+%% Pre-compile key/value template pairs into a map of token lists.
+to_kv_config(KVfields) ->
+    maps:fold(fun to_maps_config/3, #{}, proplists:to_map(KVfields)).
+
+to_maps_config(K, V, Res) ->
+    NK = emqx_placeholder:preproc_tmpl(bin(K)),
+    Res#{NK => preproc_quoted(V)}.
+
+%% Quoted values keep their {quoted, _} tag through pre-processing so
+%% they are later rendered as string lists (see value_type/2).
+preproc_quoted({quoted, V}) ->
+    {quoted, emqx_placeholder:preproc_tmpl(bin(V))};
+preproc_quoted(V) ->
+    emqx_placeholder:preproc_tmpl(bin(V)).
+
+proc_quoted({quoted, V}, Data, TransOpts) ->
+    {quoted, emqx_placeholder:proc_tmpl(V, Data, TransOpts)};
+proc_quoted(V, Data, TransOpts) ->
+    emqx_placeholder:proc_tmpl(V, Data, TransOpts).
+
+%% -------------------------------------------------------------------------------------------------
+%% Tags & Fields Data Trans
+%% Render every batched message; a single failed message fails the
+%% whole batch (errors are counted, not collected).
+parse_batch_data(InstId, BatchData, SyntaxLines) ->
+    {Points, Errors} = lists:foldl(
+        fun({_, Data}, {ListOfPoints, ErrAccIn}) ->
+            case data_to_points(Data, SyntaxLines) of
+                {ok, Points} ->
+                    {[Points | ListOfPoints], ErrAccIn};
+                {error, ErrorPoints} ->
+                    log_error_points(InstId, ErrorPoints),
+                    {ListOfPoints, ErrAccIn + 1}
+            end
+        end,
+        {[], 0},
+        BatchData
+    ),
+    case Errors of
+        0 ->
+            {ok, lists:flatten(Points)};
+        _ ->
+            ?SLOG(error, #{
+                msg => "datalayers_trans_point_failed",
+                error_count => Errors,
+                connector => InstId,
+                reason => points_trans_failed
+            }),
+            {error, points_trans_failed}
+    end.
+
+-spec data_to_points(map(), [
+    #{
+        fields := [{binary(), binary()}],
+        measurement := binary(),
+        tags := [{binary(), binary()}],
+        timestamp := emqx_placeholder:tmpl_token() | integer(),
+        precision := {From :: ts_precision(), To :: ts_precision()}
+    }
+]) -> {ok, [map()]} | {error, term()}.
+data_to_points(Data, SyntaxLines) ->
+    lines_to_points(Data, SyntaxLines, [], []).
+
+%% When converting multiple rows data into InfluxDB Line Protocol, they are considered to be strongly correlated.
+%% And once a row fails to convert, all of them are considered to have failed.
+lines_to_points(_, [], Points, ErrorPoints) ->
+    case ErrorPoints of
+        [] ->
+            {ok, Points};
+        _ ->
+            %% ignore trans succeeded points
+            {error, ErrorPoints}
+    end;
+lines_to_points(Data, [#{timestamp := Ts} = Item | Rest], ResultPointsAcc, ErrorPointsAcc) when
+    is_list(Ts)
+->
+    %% timestamp is a template token list: render and parse it
+    TransOptions = #{return => rawlist, var_trans => fun data_filter/1},
+    case parse_timestamp(emqx_placeholder:proc_tmpl(Ts, Data, TransOptions)) of
+        {ok, TsInt} ->
+            Item1 = Item#{timestamp => TsInt},
+            continue_lines_to_points(Data, Item1, Rest, ResultPointsAcc, ErrorPointsAcc);
+        {error, BadTs} ->
+            lines_to_points(Data, Rest, ResultPointsAcc, [
+                {error, {bad_timestamp, BadTs}} | ErrorPointsAcc
+            ])
+    end;
+lines_to_points(Data, [#{timestamp := Ts} = Item | Rest], ResultPointsAcc, ErrorPointsAcc) when
+    is_integer(Ts)
+->
+    %% timestamp is a constant integer: use it as-is
+    continue_lines_to_points(Data, Item, Rest, ResultPointsAcc, ErrorPointsAcc).
+
+%% Accept an integer render result, or a binary that parses as one.
+parse_timestamp([TsInt]) when is_integer(TsInt) ->
+    {ok, TsInt};
+parse_timestamp([TsBin]) ->
+    try
+        {ok, binary_to_integer(TsBin)}
+    catch
+        _:_ ->
+            {error, TsBin}
+    end.
+
+continue_lines_to_points(Data, Item, Rest, ResultPointsAcc, ErrorPointsAcc) ->
+    case line_to_point(Data, Item) of
+        #{fields := Fields} when map_size(Fields) =:= 0 ->
+            %% influxdb client doesn't like empty field maps...
+            ErrorPointsAcc1 = [{error, no_fields} | ErrorPointsAcc],
+            lines_to_points(Data, Rest, ResultPointsAcc, ErrorPointsAcc1);
+        Point ->
+            lines_to_points(Data, Rest, [Point | ResultPointsAcc], ErrorPointsAcc)
+    end.
+
+%% Render one pre-compiled line item against the message data, encoding
+%% tags and fields and converting the timestamp to the client precision.
+line_to_point(
+    Data,
+    #{
+        measurement := Measurement,
+        tags := Tags,
+        fields := Fields,
+        timestamp := Ts,
+        precision := Precision
+    } = Item
+) ->
+    {_, EncodedTags, _} = maps:fold(fun maps_config_to_data/3, {Data, #{}, ?set_tag}, Tags),
+    {_, EncodedFields, _} = maps:fold(fun maps_config_to_data/3, {Data, #{}, ?set_field}, Fields),
+    %% `precision` is internal only; drop it from the emitted point
+    maps:without([precision], Item#{
+        measurement => emqx_placeholder:proc_tmpl(Measurement, Data),
+        tags => EncodedTags,
+        fields => EncodedFields,
+        timestamp => maybe_convert_time_unit(Ts, Precision)
+    }).
+
+maybe_convert_time_unit(Ts, {FromPrecision, ToPrecision}) ->
+    erlang:convert_time_unit(Ts, time_unit(FromPrecision), time_unit(ToPrecision)).
+
+time_unit(s) -> second;
+time_unit(ms) -> millisecond;
+time_unit(us) -> microsecond;
+time_unit(ns) -> nanosecond.
+
+%% Render one key/value template pair; pairs whose key or value renders
+%% to `undefined` are silently skipped.
+maps_config_to_data(K, V, {Data, Res, SetType}) ->
+    KTransOptions = #{return => rawlist, var_trans => fun key_filter/1},
+    VTransOptions = #{return => rawlist, var_trans => fun data_filter/1},
+    NK = emqx_placeholder:proc_tmpl(K, Data, KTransOptions),
+    NV = proc_quoted(V, Data, VTransOptions),
+    case {NK, NV} of
+        {[undefined], _} ->
+            {Data, Res, SetType};
+        %% undefined value in normal format [undefined] or int/uint format [undefined, <<"i">>]
+        {_, [undefined | _]} ->
+            {Data, Res, SetType};
+        {_, {quoted, [undefined | _]}} ->
+            {Data, Res, SetType};
+        _ ->
+            NRes = Res#{
+                list_to_binary(NK) => value_type(NV, #{
+                    tmpl_type => tmpl_type(V), set_type => SetType
+                })
+            },
+            {Data, NRes, SetType}
+    end.
+
+%% Map a rendered value to its typed representation for the driver.
+%% Clause order matters: tag handling first, then typed suffixes,
+%% then boolean literals, then literal-string number coercion.
+value_type([Number], #{set_type := ?set_tag}) when is_number(Number) ->
+    %% all `tag` values are treated as string
+    %% See also: https://docs.influxdata.com/influxdb/v2/reference/syntax/line-protocol/#tag-set
+    emqx_utils_conv:bin(Number);
+value_type([Str], #{set_type := ?set_tag}) when is_binary(Str) ->
+    Str;
+value_type({quoted, ValList}, _) ->
+    {string_list, ValList};
+value_type([Int, <<"i">>], #{tmpl_type := mixed}) when is_integer(Int) ->
+    {int, Int};
+value_type([UInt, <<"u">>], #{tmpl_type := mixed}) when is_integer(UInt) ->
+    {uint, UInt};
+%% write `1`, `1.0`, `-1.0` all as float
+%% see also: https://docs.influxdata.com/influxdb/v2.7/reference/syntax/line-protocol/#float
+value_type([Number], #{set_type := ?set_field}) when is_number(Number) ->
+    {float, Number};
+value_type([<<"t">>], _) ->
+    't';
+value_type([<<"T">>], _) ->
+    'T';
+value_type([true], _) ->
+    'true';
+value_type([<<"TRUE">>], _) ->
+    'TRUE';
+value_type([<<"True">>], _) ->
+    'True';
+value_type([<<"f">>], _) ->
+    'f';
+value_type([<<"F">>], _) ->
+    'F';
+value_type([false], _) ->
+    'false';
+value_type([<<"FALSE">>], _) ->
+    'FALSE';
+value_type([<<"False">>], _) ->
+    'False';
+value_type([Str], #{tmpl_type := variable}) when is_binary(Str) ->
+    Str;
+value_type([Str], #{tmpl_type := literal, set_type := ?set_field}) when is_binary(Str) ->
+    %% if Str is a literal string suffixed with `i` or `u`, we should convert it to int/uint.
+    %% otherwise, we should convert it to float.
+    NumStr = binary:part(Str, 0, byte_size(Str) - 1),
+    case binary:part(Str, byte_size(Str), -1) of
+        <<"i">> ->
+            maybe_convert_to_integer(NumStr, Str, int);
+        <<"u">> ->
+            maybe_convert_to_integer(NumStr, Str, uint);
+        _ ->
+            maybe_convert_to_float_str(Str)
+    end;
+value_type(Str, _) ->
+    Str.
+
+%% Classify a pre-compiled template: pure literal, single placeholder,
+%% or a mix of both.
+tmpl_type([{str, _}]) ->
+    literal;
+tmpl_type([{var, _}]) ->
+    variable;
+tmpl_type(_) ->
+    mixed.
+
+%% Try integer first, then float (floored); fall back to the original
+%% string when neither parses.
+maybe_convert_to_integer(NumStr, String, Type) ->
+    try
+        Int = binary_to_integer(NumStr),
+        {Type, Int}
+    catch
+        error:badarg ->
+            maybe_convert_to_integer_f(NumStr, String, Type)
+    end.
+
+maybe_convert_to_integer_f(NumStr, String, Type) ->
+    try
+        Float = binary_to_float(NumStr),
+        {Type, erlang:floor(Float)}
+    catch
+        error:badarg ->
+            String
+    end.
+
+maybe_convert_to_float_str(NumStr) ->
+    try
+        _ = binary_to_float(NumStr),
+        %% NOTE: return a {float, String} to avoid precision loss when converting to float
+        {float, NumStr}
+    catch
+        error:badarg ->
+            maybe_convert_to_float_str_i(NumStr)
+    end.
+
+maybe_convert_to_float_str_i(NumStr) ->
+    try
+        _ = binary_to_integer(NumStr),
+        {float, NumStr}
+    catch
+        error:badarg ->
+            NumStr
+    end.
+
+%% var_trans callbacks: keys are stringified, values keep native types.
+key_filter(undefined) -> undefined;
+key_filter(Value) -> bin(Value).
+
+data_filter(undefined) -> undefined;
+data_filter(Int) when is_integer(Int) -> Int;
+data_filter(Number) when is_number(Number) -> Number;
+data_filter(Bool) when is_boolean(Bool) -> Bool;
+data_filter(Data) -> bin(Data).
+
+bin(Data) -> emqx_utils_conv:bin(Data).
+
+%% helper funcs
+%% Log every per-point render error individually.
+log_error_points(InstId, Errs) ->
+    lists:foreach(
+        fun({error, Reason}) ->
+            ?SLOG(error, #{
+                msg => "datalayers_trans_point_failed",
+                connector => InstId,
+                reason => Reason
+            })
+        end,
+        Errs
+    ).
+
+%% Strip scheme prefixes and trailing slashes before the generic
+%% host:port parsing.
+convert_server(<<"http://", Server/binary>>, HoconOpts) ->
+    convert_server(Server, HoconOpts);
+convert_server(<<"https://", Server/binary>>, HoconOpts) ->
+    convert_server(Server, HoconOpts);
+convert_server(Server0, HoconOpts) ->
+    Server = string:trim(Server0, trailing, "/"),
+    emqx_schema:convert_servers(Server, HoconOpts).
+
+str(A) when is_atom(A) ->
+    atom_to_list(A);
+str(B) when is_binary(B) ->
+    binary_to_list(B);
+str(S) when is_list(S) ->
+    S.
+
+%% An error is unrecoverable when explicitly tagged so, or when it
+%% carries a non-2xx HTTP status code.
+is_unrecoverable_error({error, {unrecoverable_error, _}}) ->
+    true;
+is_unrecoverable_error({error, {Code, _}}) when ?IS_HTTP_ERROR(Code) ->
+    true;
+is_unrecoverable_error({error, {Code, _, _Body}}) when ?IS_HTTP_ERROR(Code) ->
+    true;
+is_unrecoverable_error(_) ->
+    false.
+
+%%===================================================================
+%% eunit tests
+%%===================================================================
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+%% `is_auth_key/1` must match "authorization" case-insensitively and
+%% reject non-binary keys.
+is_auth_key_test_() ->
+    [
+        ?_assert(is_auth_key(<<"Authorization">>)),
+        ?_assertNot(is_auth_key(<<"Something">>)),
+        ?_assertNot(is_auth_key(89))
+    ].
+
+%% for coverage
+desc_test_() ->
+    [
+        ?_assertMatch(
+            {desc, _, _},
+            desc(common)
+        ),
+        ?_assertMatch(
+            {desc, _, _},
+            desc(datalayers_api_v1)
+        ),
+        ?_assertMatch(
+            {desc, _, _},
+            hocon_schema:field_schema(server(), desc)
+        ),
+        ?_assertMatch(
+            connector_datalayers,
+            namespace()
+        )
+    ].
+-endif.

+ 42 - 0
apps/emqx_bridge_datalayers/src/emqx_bridge_datalayers_connector_info.erl

@@ -0,0 +1,42 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_bridge_datalayers_connector_info).
+
+-behaviour(emqx_connector_info).
+
+-export([
+    type_name/0,
+    bridge_types/0,
+    resource_callback_module/0,
+    config_schema/0,
+    schema_module/0,
+    api_schema/1
+]).
+
+type_name() ->
+    datalayers.
+
+bridge_types() ->
+    [datalayers].
+
+resource_callback_module() ->
+    emqx_bridge_datalayers_connector.
+
+config_schema() ->
+    {datalayers,
+        hoconsc:mk(
+            hoconsc:map(name, hoconsc:ref(emqx_bridge_datalayers, "config_connector")),
+            #{
+                desc => <<"Datalayers Connector Config">>,
+                required => false
+            }
+        )}.
+
+schema_module() ->
+    emqx_bridge_datalayers.
+
+api_schema(Method) ->
+    emqx_connector_schema:api_ref(
+        emqx_bridge_datalayers, <<"datalayers">>, Method ++ "_connector"
+    ).

Разница между файлами не показана из-за своего большого размера
+ 1338 - 0
apps/emqx_bridge_datalayers/test/emqx_bridge_influxdb_SUITE.erl


+ 272 - 0
apps/emqx_bridge_datalayers/test/emqx_bridge_influxdb_connector_SUITE.erl

@@ -0,0 +1,272 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_bridge_influxdb_connector_SUITE).
+
+-compile(nowarn_export_all).
+-compile(export_all).
+
+-include_lib("emqx_connector/include/emqx_connector.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("common_test/include/ct.hrl").
+
+-define(INFLUXDB_RESOURCE_MOD, emqx_bridge_influxdb_connector).
+
+all() ->
+    emqx_common_test_helpers:all(?MODULE).
+
+groups() ->
+    [].
+
+init_per_suite(Config) ->
+    InfluxDBTCPHost = os:getenv("INFLUXDB_APIV2_TCP_HOST", "toxiproxy"),
+    InfluxDBTCPPort = list_to_integer(os:getenv("INFLUXDB_APIV2_TCP_PORT", "8086")),
+    InfluxDBTLSHost = os:getenv("INFLUXDB_APIV2_TLS_HOST", "toxiproxy"),
+    InfluxDBTLSPort = list_to_integer(os:getenv("INFLUXDB_APIV2_TLS_PORT", "8087")),
+    Servers = [{InfluxDBTCPHost, InfluxDBTCPPort}, {InfluxDBTLSHost, InfluxDBTLSPort}],
+    case emqx_common_test_helpers:is_all_tcp_servers_available(Servers) of
+        true ->
+            Apps = emqx_cth_suite:start(
+                [
+                    emqx_conf,
+                    emqx_bridge_influxdb,
+                    emqx_bridge
+                ],
+                #{work_dir => emqx_cth_suite:work_dir(Config)}
+            ),
+            [
+                {apps, Apps},
+                {influxdb_tcp_host, InfluxDBTCPHost},
+                {influxdb_tcp_port, InfluxDBTCPPort},
+                {influxdb_tls_host, InfluxDBTLSHost},
+                {influxdb_tls_port, InfluxDBTLSPort}
+                | Config
+            ];
+        false ->
+            case os:getenv("IS_CI") of
+                "yes" ->
+                    throw(no_influxdb);
+                _ ->
+                    {skip, no_influxdb}
+            end
+    end.
+
+end_per_suite(Config) ->
+    Apps = ?config(apps, Config),
+    emqx_cth_suite:stop(Apps),
+    ok.
+
+init_per_testcase(_, Config) ->
+    Config.
+
+end_per_testcase(_, _Config) ->
+    ok.
+
+% %%------------------------------------------------------------------------------
+% %% Testcases
+% %%------------------------------------------------------------------------------
+
+t_lifecycle(Config) ->
+    Host = ?config(influxdb_tcp_host, Config),
+    Port = ?config(influxdb_tcp_port, Config),
+    perform_lifecycle_check(
+        <<"emqx_bridge_influxdb_connector_SUITE">>,
+        influxdb_connector_config(Host, Port, false, <<"verify_none">>)
+    ).
+
+perform_lifecycle_check(PoolName, InitialConfig) ->
+    {ok, #{config := CheckedConfig}} =
+        emqx_resource:check_config(?INFLUXDB_RESOURCE_MOD, InitialConfig),
+    % We need to add a write_syntax to the config since the connector
+    % expects this
+    FullConfig = CheckedConfig#{write_syntax => influxdb_write_syntax()},
+    {ok, #{
+        id := ResourceId,
+        state := #{client := #{pool := ReturnedPoolName}} = State,
+        status := InitialStatus
+    }} = emqx_resource:create_local(
+        PoolName,
+        ?CONNECTOR_RESOURCE_GROUP,
+        ?INFLUXDB_RESOURCE_MOD,
+        FullConfig,
+        #{}
+    ),
+    ?assertEqual(InitialStatus, connected),
+    % Instance should match the state and status of the just started resource
+    {ok, ?CONNECTOR_RESOURCE_GROUP, #{
+        state := State,
+        status := InitialStatus
+    }} =
+        emqx_resource:get_instance(PoolName),
+    ?assertEqual({ok, connected}, emqx_resource:health_check(PoolName)),
+    %% install actions to the connector
+    ActionConfig = influxdb_action_config(),
+    ChannelId = <<"test_channel">>,
+    ?assertEqual(
+        ok,
+        emqx_resource_manager:add_channel(
+            ResourceId, ChannelId, ActionConfig
+        )
+    ),
+    ?assertMatch(#{status := connected}, emqx_resource:channel_health_check(ResourceId, ChannelId)),
+    % % Perform query as further check that the resource is working as expected
+    ?assertMatch({ok, 204, _}, emqx_resource:query(PoolName, test_query(ChannelId))),
+    ?assertEqual(ok, emqx_resource:stop(PoolName)),
+    % Resource will be listed still, but state will be changed and healthcheck will fail
+    % as the worker no longer exists.
+    {ok, ?CONNECTOR_RESOURCE_GROUP, #{
+        state := State,
+        status := StoppedStatus
+    }} =
+        emqx_resource:get_instance(PoolName),
+    ?assertEqual(stopped, StoppedStatus),
+    ?assertEqual({error, resource_is_stopped}, emqx_resource:health_check(PoolName)),
+    % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself.
+    ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)),
+    % Can call stop/1 again on an already stopped instance
+    ?assertEqual(ok, emqx_resource:stop(PoolName)),
+    % Make sure it can be restarted and the healthchecks and queries work properly
+    ?assertEqual(ok, emqx_resource:restart(PoolName)),
+    % async restart, need to wait resource
+    timer:sleep(500),
+    {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} =
+        emqx_resource:get_instance(PoolName),
+    ?assertEqual({ok, connected}, emqx_resource:health_check(PoolName)),
+    ChannelId = <<"test_channel">>,
+    ?assertEqual(
+        ok,
+        emqx_resource_manager:add_channel(
+            ResourceId, ChannelId, ActionConfig
+        )
+    ),
+    ?assertMatch(#{status := connected}, emqx_resource:channel_health_check(ResourceId, ChannelId)),
+    ?assertMatch({ok, 204, _}, emqx_resource:query(PoolName, test_query(ChannelId))),
+    % Stop and remove the resource in one go.
+    ?assertEqual(ok, emqx_resource:remove_local(PoolName)),
+    ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)),
+    % Should not even be able to get the resource data out of ets now unlike just stopping.
+    ?assertEqual({error, not_found}, emqx_resource:get_instance(PoolName)).
+
+t_tls_verify_none(Config) ->
+    PoolName = <<"testpool-1">>,
+    Host = ?config(influxdb_tls_host, Config),
+    Port = ?config(influxdb_tls_port, Config),
+    InitialConfig = influxdb_connector_config(Host, Port, true, <<"verify_none">>),
+    ValidStatus = perform_tls_opts_check(PoolName, InitialConfig, valid),
+    ?assertEqual(connected, ValidStatus),
+    InvalidStatus = perform_tls_opts_check(PoolName, InitialConfig, fail),
+    ?assertEqual(disconnected, InvalidStatus),
+    ok.
+
+t_tls_verify_peer(Config) ->
+    PoolName = <<"testpool-2">>,
+    Host = ?config(influxdb_tls_host, Config),
+    Port = ?config(influxdb_tls_port, Config),
+    InitialConfig = influxdb_connector_config(Host, Port, true, <<"verify_peer">>),
+    %% This works without a CA-cert & friends since we are using a mock
+    ValidStatus = perform_tls_opts_check(PoolName, InitialConfig, valid),
+    ?assertEqual(connected, ValidStatus),
+    InvalidStatus = perform_tls_opts_check(PoolName, InitialConfig, fail),
+    ?assertEqual(disconnected, InvalidStatus),
+    ok.
+
+perform_tls_opts_check(PoolName, InitialConfig, VerifyReturn) ->
+    {ok, #{config := CheckedConfig}} =
+        emqx_resource:check_config(?INFLUXDB_RESOURCE_MOD, InitialConfig),
+    % Meck handling of TLS opt handling so that we can inject custom
+    % verification returns
+    meck:new(emqx_tls_lib, [passthrough, no_link]),
+    meck:expect(
+        emqx_tls_lib,
+        to_client_opts,
+        fun(Opts) ->
+            Verify = {verify_fun, {custom_verify(), {return, VerifyReturn}}},
+            [
+                Verify,
+                {cacerts, public_key:cacerts_get()}
+                | meck:passthrough([Opts])
+            ]
+        end
+    ),
+    try
+        % We need to add a write_syntax to the config since the connector
+        % expects this
+        FullConfig = CheckedConfig#{write_syntax => influxdb_write_syntax()},
+        {ok, #{
+            config := #{ssl := #{enable := SslEnabled}},
+            status := Status
+        }} = emqx_resource:create_local(
+            PoolName,
+            ?CONNECTOR_RESOURCE_GROUP,
+            ?INFLUXDB_RESOURCE_MOD,
+            FullConfig,
+            #{}
+        ),
+        ?assert(SslEnabled),
+        ?assert(meck:validate(emqx_tls_lib)),
+        % Stop and remove the resource in one go.
+        ?assertEqual(ok, emqx_resource:remove_local(PoolName)),
+        Status
+    after
+        meck:unload(emqx_tls_lib)
+    end.
+
+% %%------------------------------------------------------------------------------
+% %% Helpers
+% %%------------------------------------------------------------------------------
+
+influxdb_connector_config(Host, Port, SslEnabled, Verify) ->
+    Server = list_to_binary(io_lib:format("~s:~b", [Host, Port])),
+    ConnectorConf = #{
+        <<"parameters">> => #{
+            <<"influxdb_type">> => <<"influxdb_api_v2">>,
+            <<"bucket">> => <<"mqtt">>,
+            <<"org">> => <<"emqx">>,
+            <<"token">> => <<"abcdefg">>
+        },
+        <<"server">> => Server,
+        <<"ssl">> => #{
+            <<"enable">> => SslEnabled,
+            <<"verify">> => Verify
+        }
+    },
+    #{<<"config">> => ConnectorConf}.
+
+influxdb_action_config() ->
+    #{
+        parameters => #{
+            write_syntax => influxdb_write_syntax(),
+            precision => ms
+        }
+    }.
+
+custom_verify() ->
+    fun
+        (_, {bad_cert, unknown_ca} = Event, {return, Return} = UserState) ->
+            ct:pal("Call to custom verify fun. Event: ~p UserState: ~p", [Event, UserState]),
+            {Return, UserState};
+        (_, Event, UserState) ->
+            ct:pal("Unexpected call to custom verify fun. Event: ~p UserState: ~p", [
+                Event, UserState
+            ]),
+            {fail, unexpected_call_to_verify_fun}
+    end.
+
+influxdb_write_syntax() ->
+    [
+        #{
+            measurement => "${topic}",
+            tags => [{"clientid", "${clientid}"}],
+            fields => [{"payload", "${payload}"}],
+            timestamp => undefined
+        }
+    ].
+
+test_query(ChannelId) ->
+    {ChannelId, #{
+        <<"clientid">> => <<"something">>,
+        <<"payload">> => #{bool => true},
+        <<"topic">> => <<"connector_test">>,
+        <<"timestamp">> => 1678220316257
+    }}.

+ 392 - 0
apps/emqx_bridge_datalayers/test/emqx_bridge_influxdb_tests.erl

@@ -0,0 +1,392 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_bridge_influxdb_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-define(INVALID_LINES, [
+    "   ",
+    " \n",
+    "  \n\n\n  ",
+    "\n",
+    "  \n\n   \n  \n",
+    "measurement",
+    "measurement ",
+    "measurement,tag",
+    "measurement field",
+    "measurement,tag field",
+    "measurement,tag field ${timestamp}",
+    "measurement,tag=",
+    "measurement,tag=tag1",
+    "measurement,tag =",
+    "measurement field=",
+    "measurement field= ",
+    "measurement field = ",
+    "measurement, tag = field = ",
+    "measurement, tag = field = ",
+    "measurement, tag = tag_val field = field_val",
+    "measurement, tag = tag_val field = field_val ${timestamp}",
+    "measurement,= = ${timestamp}",
+    "measurement,t=a, f=a, ${timestamp}",
+    "measurement,t=a,t1=b, f=a,f1=b, ${timestamp}",
+    "measurement,t=a,t1=b, f=a,f1=b,",
+    "measurement,t=a, t1=b, f=a,f1=b,",
+    "measurement,t=a,,t1=b, f=a,f1=b,",
+    "measurement,t=a,,t1=b f=a,,f1=b",
+    "measurement,t=a,,t1=b f=a,f1=b ${timestamp}",
+    "measurement, f=a,f1=b",
+    "measurement, f=a,f1=b ${timestamp}",
+    "measurement,, f=a,f1=b ${timestamp}",
+    "measurement,, f=a,f1=b",
+    "measurement,, f=a,f1=b,, ${timestamp}",
+    "measurement f=a,f1=b,, ${timestamp}",
+    "measurement,t=a f=a,f1=b,, ${timestamp}",
+    "measurement,t=a f=a,f1=b,, ",
+    "measurement,t=a f=a,f1=b,,",
+    "measurement, t=a  f=a,f1=b",
+    "measurement,t=a f=a, f1=b",
+    "measurement,t=a f=a, f1=b ${timestamp}",
+    "measurement, t=a  f=a, f1=b ${timestamp}",
+    "measurement,t= a f=a,f1=b ${timestamp}",
+    "measurement,t= a f=a,f1 =b ${timestamp}",
+    "measurement, t = a f = a,f1 = b ${timestamp}",
+    "measurement,t=a f=a,f1=b \n ${timestamp}",
+    "measurement,t=a \n f=a,f1=b \n ${timestamp}",
+    "measurement,t=a \n f=a,f1=b \n ",
+    "\n measurement,t=a \n f=a,f1=b \n ${timestamp}",
+    "\n measurement,t=a \n f=a,f1=b \n",
+    %% not escaped backslash in a quoted field value is invalid
+    "measurement,tag=1 field=\"val\\1\""
+]).
+
+-define(VALID_LINE_PARSED_PAIRS, [
+    {"m1,tag=tag1 field=field1 ${timestamp1}", #{
+        measurement => "m1",
+        tags => [{"tag", "tag1"}],
+        fields => [{"field", "field1"}],
+        timestamp => "${timestamp1}"
+    }},
+    {"m2,tag=tag2 field=field2", #{
+        measurement => "m2",
+        tags => [{"tag", "tag2"}],
+        fields => [{"field", "field2"}],
+        timestamp => undefined
+    }},
+    {"m3 field=field3 ${timestamp3}", #{
+        measurement => "m3",
+        tags => [],
+        fields => [{"field", "field3"}],
+        timestamp => "${timestamp3}"
+    }},
+    {"m4 field=field4", #{
+        measurement => "m4",
+        tags => [],
+        fields => [{"field", "field4"}],
+        timestamp => undefined
+    }},
+    {"m5,tag=tag5,tag_a=tag5a,tag_b=tag5b field=field5,field_a=field5a,field_b=field5b ${timestamp5}",
+        #{
+            measurement => "m5",
+            tags => [{"tag", "tag5"}, {"tag_a", "tag5a"}, {"tag_b", "tag5b"}],
+            fields => [{"field", "field5"}, {"field_a", "field5a"}, {"field_b", "field5b"}],
+            timestamp => "${timestamp5}"
+        }},
+    {"m6,tag=tag6,tag_a=tag6a,tag_b=tag6b field=field6,field_a=field6a,field_b=field6b", #{
+        measurement => "m6",
+        tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
+        fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}],
+        timestamp => undefined
+    }},
+    {"m7,tag=tag7,tag_a=\"tag7a\",tag_b=tag7b field=\"field7\",field_a=field7a,field_b=\"field7b\"",
+        #{
+            measurement => "m7",
+            tags => [{"tag", "tag7"}, {"tag_a", "\"tag7a\""}, {"tag_b", "tag7b"}],
+            fields => [
+                {"field", {quoted, "field7"}},
+                {"field_a", "field7a"},
+                {"field_b", {quoted, "field7b"}}
+            ],
+            timestamp => undefined
+        }},
+    {"m8,tag=tag8,tag_a=\"tag8a\",tag_b=tag8b field=\"field8\",field_a=field8a,field_b=\"field8b\" ${timestamp8}",
+        #{
+            measurement => "m8",
+            tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}],
+            fields => [
+                {"field", {quoted, "field8"}},
+                {"field_a", "field8a"},
+                {"field_b", {quoted, "field8b"}}
+            ],
+            timestamp => "${timestamp8}"
+        }},
+    {
+        "m8a,tag=tag8,tag_a=\"${tag8a}\",tag_b=tag8b field=\"${field8}\","
+        "field_a=field8a,field_b=\"${field8b}\" ${timestamp8}",
+        #{
+            measurement => "m8a",
+            tags => [{"tag", "tag8"}, {"tag_a", "\"${tag8a}\""}, {"tag_b", "tag8b"}],
+            fields => [
+                {"field", {quoted, "${field8}"}},
+                {"field_a", "field8a"},
+                {"field_b", {quoted, "${field8b}"}}
+            ],
+            timestamp => "${timestamp8}"
+        }
+    },
+    {"m9,tag=tag9,tag_a=\"tag9a\",tag_b=tag9b field=\"field9\",field_a=field9a,field_b=\"\" ${timestamp9}",
+        #{
+            measurement => "m9",
+            tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}],
+            fields => [
+                {"field", {quoted, "field9"}}, {"field_a", "field9a"}, {"field_b", {quoted, ""}}
+            ],
+            timestamp => "${timestamp9}"
+        }},
+    {"m10 field=\"\" ${timestamp10}", #{
+        measurement => "m10",
+        tags => [],
+        fields => [{"field", {quoted, ""}}],
+        timestamp => "${timestamp10}"
+    }}
+]).
+
+-define(VALID_LINE_EXTRA_SPACES_PARSED_PAIRS, [
+    {"\n  m1,tag=tag1  field=field1  ${timestamp1} \n", #{
+        measurement => "m1",
+        tags => [{"tag", "tag1"}],
+        fields => [{"field", "field1"}],
+        timestamp => "${timestamp1}"
+    }},
+    {"  m2,tag=tag2  field=field2  ", #{
+        measurement => "m2",
+        tags => [{"tag", "tag2"}],
+        fields => [{"field", "field2"}],
+        timestamp => undefined
+    }},
+    {" m3  field=field3   ${timestamp3}  ", #{
+        measurement => "m3",
+        tags => [],
+        fields => [{"field", "field3"}],
+        timestamp => "${timestamp3}"
+    }},
+    {" \n m4  field=field4\n ", #{
+        measurement => "m4",
+        tags => [],
+        fields => [{"field", "field4"}],
+        timestamp => undefined
+    }},
+    {" \n m5,tag=tag5,tag_a=tag5a,tag_b=tag5b   field=field5,field_a=field5a,field_b=field5b    ${timestamp5}  \n",
+        #{
+            measurement => "m5",
+            tags => [{"tag", "tag5"}, {"tag_a", "tag5a"}, {"tag_b", "tag5b"}],
+            fields => [{"field", "field5"}, {"field_a", "field5a"}, {"field_b", "field5b"}],
+            timestamp => "${timestamp5}"
+        }},
+    {" m6,tag=tag6,tag_a=tag6a,tag_b=tag6b  field=field6,field_a=field6a,field_b=field6b\n  ", #{
+        measurement => "m6",
+        tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
+        fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}],
+        timestamp => undefined
+    }}
+]).
+
+-define(VALID_LINE_PARSED_ESCAPED_CHARS_PAIRS, [
+    {"m\\ =1\\,,\\,tag\\ \\==\\=tag\\ 1\\, \\,fie\\ ld\\ =\\ field\\,1 ${timestamp1}", #{
+        measurement => "m =1,",
+        tags => [{",tag =", "=tag 1,"}],
+        fields => [{",fie ld ", " field,1"}],
+        timestamp => "${timestamp1}"
+    }},
+    {"m2,tag=tag2 field=\"field \\\"2\\\",\n\"", #{
+        measurement => "m2",
+        tags => [{"tag", "tag2"}],
+        fields => [{"field", {quoted, "field \"2\",\n"}}],
+        timestamp => undefined
+    }},
+    {"m\\ 3 field=\"field3\" ${payload.timestamp\\ 3}", #{
+        measurement => "m 3",
+        tags => [],
+        fields => [{"field", {quoted, "field3"}}],
+        timestamp => "${payload.timestamp 3}"
+    }},
+    {"m4 field=\"\\\"field\\\\4\\\"\"", #{
+        measurement => "m4",
+        tags => [],
+        fields => [{"field", {quoted, "\"field\\4\""}}],
+        timestamp => undefined
+    }},
+    {
+        "m5\\,mA,tag=\\=tag5\\=,\\,tag_a\\,=tag\\ 5a,tag_b=tag5b \\ field\\ =field5,"
+        "field\\ _\\ a=field5a,\\,field_b\\ =\\=\\,\\ field5b ${timestamp5}",
+        #{
+            measurement => "m5,mA",
+            tags => [{"tag", "=tag5="}, {",tag_a,", "tag 5a"}, {"tag_b", "tag5b"}],
+            fields => [
+                {" field ", "field5"}, {"field _ a", "field5a"}, {",field_b ", "=, field5b"}
+            ],
+            timestamp => "${timestamp5}"
+        }
+    },
+    {"m6,tag=tag6,tag_a=tag6a,tag_b=tag6b field=\"field6\",field_a=\"field6a\",field_b=\"field6b\"",
+        #{
+            measurement => "m6",
+            tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
+            fields => [
+                {"field", {quoted, "field6"}},
+                {"field_a", {quoted, "field6a"}},
+                {"field_b", {quoted, "field6b"}}
+            ],
+            timestamp => undefined
+        }},
+    {
+        "\\ \\ m7\\ \\ ,tag=\\ tag\\,7\\ ,tag_a=\"tag7a\",tag_b\\,tag1=tag7b field=\"field7\","
+        "field_a=field7a,field_b=\"field7b\\\\\n\"",
+        #{
+            measurement => "  m7  ",
+            tags => [{"tag", " tag,7 "}, {"tag_a", "\"tag7a\""}, {"tag_b,tag1", "tag7b"}],
+            fields => [
+                {"field", {quoted, "field7"}},
+                {"field_a", "field7a"},
+                {"field_b", {quoted, "field7b\\\n"}}
+            ],
+            timestamp => undefined
+        }
+    },
+    {
+        "m8,tag=tag8,tag_a=\"tag8a\",tag_b=tag8b field=\"field8\",field_a=field8a,"
+        "field_b=\"\\\"field\\\" = 8b\" ${timestamp8}",
+        #{
+            measurement => "m8",
+            tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}],
+            fields => [
+                {"field", {quoted, "field8"}},
+                {"field_a", "field8a"},
+                {"field_b", {quoted, "\"field\" = 8b"}}
+            ],
+            timestamp => "${timestamp8}"
+        }
+    },
+    {"m\\9,tag=tag9,tag_a=\"tag9a\",tag_b=tag9b field\\=field=\"field9\",field_a=field9a,field_b=\"\" ${timestamp9}",
+        #{
+            measurement => "m\\9",
+            tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}],
+            fields => [
+                {"field=field", {quoted, "field9"}},
+                {"field_a", "field9a"},
+                {"field_b", {quoted, ""}}
+            ],
+            timestamp => "${timestamp9}"
+        }},
+    {"m\\,10 \"field\\\\\"=\"\" ${timestamp10}", #{
+        measurement => "m,10",
+        tags => [],
+        %% backslash should not be un-escaped in tag key
+        fields => [{"\"field\\\\\"", {quoted, ""}}],
+        timestamp => "${timestamp10}"
+    }}
+]).
+
+-define(VALID_LINE_PARSED_ESCAPED_CHARS_EXTRA_SPACES_PAIRS, [
+    {" \n m\\ =1\\,,\\,tag\\ \\==\\=tag\\ 1\\,   \\,fie\\ ld\\ =\\ field\\,1  ${timestamp1}  ", #{
+        measurement => "m =1,",
+        tags => [{",tag =", "=tag 1,"}],
+        fields => [{",fie ld ", " field,1"}],
+        timestamp => "${timestamp1}"
+    }},
+    {" m2,tag=tag2   field=\"field \\\"2\\\",\n\"  ", #{
+        measurement => "m2",
+        tags => [{"tag", "tag2"}],
+        fields => [{"field", {quoted, "field \"2\",\n"}}],
+        timestamp => undefined
+    }},
+    {"  m\\ 3   field=\"field3\"   ${payload.timestamp\\ 3}  ", #{
+        measurement => "m 3",
+        tags => [],
+        fields => [{"field", {quoted, "field3"}}],
+        timestamp => "${payload.timestamp 3}"
+    }},
+    {"   m4       field=\"\\\"field\\\\4\\\"\"    ", #{
+        measurement => "m4",
+        tags => [],
+        fields => [{"field", {quoted, "\"field\\4\""}}],
+        timestamp => undefined
+    }},
+    {
+        " m5\\,mA,tag=\\=tag5\\=,\\,tag_a\\,=tag\\ 5a,tag_b=tag5b   \\ field\\ =field5,"
+        "field\\ _\\ a=field5a,\\,field_b\\ =\\=\\,\\ field5b   ${timestamp5}    ",
+        #{
+            measurement => "m5,mA",
+            tags => [{"tag", "=tag5="}, {",tag_a,", "tag 5a"}, {"tag_b", "tag5b"}],
+            fields => [
+                {" field ", "field5"}, {"field _ a", "field5a"}, {",field_b ", "=, field5b"}
+            ],
+            timestamp => "${timestamp5}"
+        }
+    },
+    {"  m6,tag=tag6,tag_a=tag6a,tag_b=tag6b   field=\"field6\",field_a=\"field6a\",field_b=\"field6b\"  ",
+        #{
+            measurement => "m6",
+            tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
+            fields => [
+                {"field", {quoted, "field6"}},
+                {"field_a", {quoted, "field6a"}},
+                {"field_b", {quoted, "field6b"}}
+            ],
+            timestamp => undefined
+        }}
+]).
+
+invalid_write_syntax_line_test_() ->
+    [?_assertThrow(_, to_influx_lines(L)) || L <- ?INVALID_LINES].
+
+invalid_write_syntax_multiline_test_() ->
+    LinesList = [
+        join("\n", ?INVALID_LINES),
+        join("\n\n\n", ?INVALID_LINES),
+        join("\n\n", lists:reverse(?INVALID_LINES))
+    ],
+    [?_assertThrow(_, to_influx_lines(Lines)) || Lines <- LinesList].
+
+valid_write_syntax_test_() ->
+    test_pairs(?VALID_LINE_PARSED_PAIRS).
+
+valid_write_syntax_with_extra_spaces_test_() ->
+    test_pairs(?VALID_LINE_EXTRA_SPACES_PARSED_PAIRS).
+
+valid_write_syntax_escaped_chars_test_() ->
+    test_pairs(?VALID_LINE_PARSED_ESCAPED_CHARS_PAIRS).
+
+valid_write_syntax_escaped_chars_with_extra_spaces_test_() ->
+    test_pairs(?VALID_LINE_PARSED_ESCAPED_CHARS_EXTRA_SPACES_PAIRS).
+
+test_pairs(PairsList) ->
+    {Lines, AllExpected} = lists:unzip(PairsList),
+    JoinedLines = join("\n", Lines),
+    JoinedLines1 = join("\n\n\n", Lines),
+    JoinedLines2 = join("\n\n", lists:reverse(Lines)),
+    SingleLineTests =
+        [
+            ?_assertEqual([Expected], to_influx_lines(Line))
+         || {Line, Expected} <- PairsList
+        ],
+    JoinedLinesTests =
+        [
+            ?_assertEqual(AllExpected, to_influx_lines(JoinedLines)),
+            ?_assertEqual(AllExpected, to_influx_lines(JoinedLines1)),
+            ?_assertEqual(lists:reverse(AllExpected), to_influx_lines(JoinedLines2))
+        ],
+    SingleLineTests ++ JoinedLinesTests.
+
+join(Sep, LinesList) ->
+    lists:flatten(lists:join(Sep, LinesList)).
+
+to_influx_lines(RawLines) ->
+    OldLevel = emqx_logger:get_primary_log_level(),
+    try
+        %% mute error logs from this call
+        emqx_logger:set_primary_log_level(none),
+        emqx_bridge_influxdb:to_influx_lines(RawLines)
+    after
+        emqx_logger:set_primary_log_level(OldLevel)
+    end.

+ 2 - 1
apps/emqx_connector/src/emqx_connector_info.erl

@@ -109,7 +109,8 @@ hard_coded_connector_info_modules_ee() ->
         emqx_bridge_syskeeper_connector_info,
         emqx_bridge_syskeeper_proxy_connector_info,
         emqx_bridge_tdengine_connector_info,
-        emqx_bridge_timescale_connector_info
+        emqx_bridge_timescale_connector_info,
+        emqx_bridge_datalayers_connector_info
     ].
 -else.
 hard_coded_connector_info_modules_ee() ->

+ 1 - 0
apps/emqx_machine/priv/reboot_lists.eterm

@@ -119,6 +119,7 @@
             emqx_bridge_oracle,
             emqx_bridge_rabbitmq,
             emqx_bridge_azure_event_hub,
+            emqx_bridge_datalayers,
             emqx_s3,
             emqx_bridge_s3,
             emqx_bridge_azure_blob_storage,

+ 1 - 0
rebar.config.erl

@@ -99,6 +99,7 @@ is_community_umbrella_app("apps/emqx_bridge_tdengine") -> false;
 is_community_umbrella_app("apps/emqx_bridge_timescale") -> false;
 is_community_umbrella_app("apps/emqx_bridge_oracle") -> false;
 is_community_umbrella_app("apps/emqx_bridge_sqlserver") -> false;
+is_community_umbrella_app("apps/emqx_bridge_datalayers") -> false;
 is_community_umbrella_app("apps/emqx_oracle") -> false;
 is_community_umbrella_app("apps/emqx_bridge_rabbitmq") -> false;
 is_community_umbrella_app("apps/emqx_ft") -> false;

+ 65 - 0
rel/i18n/emqx_bridge_datalayers.hocon

@@ -0,0 +1,65 @@
+emqx_bridge_datalayers {
+
+config_enable.desc:
+"""Enable or disable this bridge."""
+
+config_enable.label:
+"""Enable Or Disable Bridge"""
+
+desc_config.desc:
+"""Configuration for a Datalayers bridge."""
+
+desc_config.label:
+"""Datalayers Bridge Configuration"""
+
+desc_name.desc:
+"""Bridge name."""
+
+desc_name.label:
+"""Bridge Name"""
+
+desc_type.desc:
+"""The Bridge Type."""
+
+desc_type.label:
+"""Bridge Type"""
+
+local_topic.desc:
+"""The MQTT topic filter to be forwarded to Datalayers. All MQTT 'PUBLISH' messages with the topic
+matching the local_topic will be forwarded.<br/>
+NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is
+configured, then both the data got from the rule and the MQTT messages that match local_topic
+will be forwarded."""
+
+local_topic.label:
+"""Local Topic"""
+
+write_syntax.desc:
+"""Configuration of the InfluxDB line protocol used to write data points. It is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, with placeholders supported.
+See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and
+[InfluxDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/) <br/>
+TLDR:<br/>
+```
+<measurement>[,<tag_key>=<tag_value>[,<tag_key>=<tag_value>]] <field_key>=<field_value>[,<field_key>=<field_value>] [<timestamp>]
+```
+Please note that a placeholder for an integer value must be annotated with a suffix `i`. For example `${payload.int_value}i`."""
+
+write_syntax.label:
+"""Write Syntax"""
+
+action_parameters.label:
+"""Action Parameters"""
+action_parameters.desc:
+"""Additional parameters specific to this action type"""
+
+connector.label:
+"""Datalayers Connector"""
+connector.desc:
+"""Datalayers Connector Configs"""
+
+datalayers_action.label:
+"""Datalayers Action"""
+datalayers_action.desc:
+"""Action to interact with a Datalayers connector"""
+
+}